deepbox 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (173)
  1. package/LICENSE +21 -0
  2. package/README.md +344 -0
  3. package/dist/CSRMatrix-CwGwQRea.d.cts +219 -0
  4. package/dist/CSRMatrix-KzNt6QpS.d.ts +219 -0
  5. package/dist/Tensor-BQLk1ltW.d.cts +147 -0
  6. package/dist/Tensor-g8mUClel.d.ts +147 -0
  7. package/dist/chunk-4S73VUBD.js +677 -0
  8. package/dist/chunk-4S73VUBD.js.map +1 -0
  9. package/dist/chunk-5R4S63PF.js +2925 -0
  10. package/dist/chunk-5R4S63PF.js.map +1 -0
  11. package/dist/chunk-6AE5FKKQ.cjs +9264 -0
  12. package/dist/chunk-6AE5FKKQ.cjs.map +1 -0
  13. package/dist/chunk-AD436M45.js +3854 -0
  14. package/dist/chunk-AD436M45.js.map +1 -0
  15. package/dist/chunk-ALS7ETWZ.cjs +4263 -0
  16. package/dist/chunk-ALS7ETWZ.cjs.map +1 -0
  17. package/dist/chunk-AU7XHGKJ.js +2092 -0
  18. package/dist/chunk-AU7XHGKJ.js.map +1 -0
  19. package/dist/chunk-B5TNKUEY.js +1481 -0
  20. package/dist/chunk-B5TNKUEY.js.map +1 -0
  21. package/dist/chunk-BCR7G3A6.js +9136 -0
  22. package/dist/chunk-BCR7G3A6.js.map +1 -0
  23. package/dist/chunk-C4PKXY74.cjs +1917 -0
  24. package/dist/chunk-C4PKXY74.cjs.map +1 -0
  25. package/dist/chunk-DWZY6PIP.cjs +6400 -0
  26. package/dist/chunk-DWZY6PIP.cjs.map +1 -0
  27. package/dist/chunk-E3EU5FZO.cjs +2113 -0
  28. package/dist/chunk-E3EU5FZO.cjs.map +1 -0
  29. package/dist/chunk-F3JWBINJ.js +1054 -0
  30. package/dist/chunk-F3JWBINJ.js.map +1 -0
  31. package/dist/chunk-FJYLIGJX.js +1940 -0
  32. package/dist/chunk-FJYLIGJX.js.map +1 -0
  33. package/dist/chunk-JSCDE774.cjs +729 -0
  34. package/dist/chunk-JSCDE774.cjs.map +1 -0
  35. package/dist/chunk-LWECRCW2.cjs +2412 -0
  36. package/dist/chunk-LWECRCW2.cjs.map +1 -0
  37. package/dist/chunk-MLBMYKCG.js +6379 -0
  38. package/dist/chunk-MLBMYKCG.js.map +1 -0
  39. package/dist/chunk-OX6QXFMV.cjs +3874 -0
  40. package/dist/chunk-OX6QXFMV.cjs.map +1 -0
  41. package/dist/chunk-PHV2DKRS.cjs +1072 -0
  42. package/dist/chunk-PHV2DKRS.cjs.map +1 -0
  43. package/dist/chunk-PL7TAYKI.js +4056 -0
  44. package/dist/chunk-PL7TAYKI.js.map +1 -0
  45. package/dist/chunk-PR647I7R.js +1898 -0
  46. package/dist/chunk-PR647I7R.js.map +1 -0
  47. package/dist/chunk-QERHVCHC.cjs +2960 -0
  48. package/dist/chunk-QERHVCHC.cjs.map +1 -0
  49. package/dist/chunk-XEG44RF6.cjs +1514 -0
  50. package/dist/chunk-XEG44RF6.cjs.map +1 -0
  51. package/dist/chunk-XMWVME2W.js +2377 -0
  52. package/dist/chunk-XMWVME2W.js.map +1 -0
  53. package/dist/chunk-ZB75FESB.cjs +1979 -0
  54. package/dist/chunk-ZB75FESB.cjs.map +1 -0
  55. package/dist/chunk-ZLW62TJG.cjs +4061 -0
  56. package/dist/chunk-ZLW62TJG.cjs.map +1 -0
  57. package/dist/chunk-ZXKBDFP3.js +4235 -0
  58. package/dist/chunk-ZXKBDFP3.js.map +1 -0
  59. package/dist/core/index.cjs +204 -0
  60. package/dist/core/index.cjs.map +1 -0
  61. package/dist/core/index.d.cts +2 -0
  62. package/dist/core/index.d.ts +2 -0
  63. package/dist/core/index.js +3 -0
  64. package/dist/core/index.js.map +1 -0
  65. package/dist/dataframe/index.cjs +22 -0
  66. package/dist/dataframe/index.cjs.map +1 -0
  67. package/dist/dataframe/index.d.cts +3 -0
  68. package/dist/dataframe/index.d.ts +3 -0
  69. package/dist/dataframe/index.js +5 -0
  70. package/dist/dataframe/index.js.map +1 -0
  71. package/dist/datasets/index.cjs +134 -0
  72. package/dist/datasets/index.cjs.map +1 -0
  73. package/dist/datasets/index.d.cts +3 -0
  74. package/dist/datasets/index.d.ts +3 -0
  75. package/dist/datasets/index.js +5 -0
  76. package/dist/datasets/index.js.map +1 -0
  77. package/dist/index-74AB8Cyh.d.cts +1126 -0
  78. package/dist/index-9oQx1HgV.d.cts +1180 -0
  79. package/dist/index-BJY2SI4i.d.ts +483 -0
  80. package/dist/index-BWGhrDlr.d.ts +733 -0
  81. package/dist/index-B_DK4FKY.d.cts +242 -0
  82. package/dist/index-BbA2Gxfl.d.ts +456 -0
  83. package/dist/index-BgHYAoSS.d.cts +837 -0
  84. package/dist/index-BndMbqsM.d.ts +1439 -0
  85. package/dist/index-C1mfVYoo.d.ts +2517 -0
  86. package/dist/index-CCvlwAmL.d.cts +809 -0
  87. package/dist/index-CDw5CnOU.d.ts +785 -0
  88. package/dist/index-Cn3SdB0O.d.ts +1126 -0
  89. package/dist/index-CrqLlS-a.d.ts +776 -0
  90. package/dist/index-D61yaSMY.d.cts +483 -0
  91. package/dist/index-D9Loo1_A.d.cts +2517 -0
  92. package/dist/index-DIT_OO9C.d.cts +785 -0
  93. package/dist/index-DIp_RrRt.d.ts +242 -0
  94. package/dist/index-DbultU6X.d.cts +1427 -0
  95. package/dist/index-DmEg_LCm.d.cts +776 -0
  96. package/dist/index-DoPWVxPo.d.cts +1439 -0
  97. package/dist/index-DuCxd-8d.d.ts +837 -0
  98. package/dist/index-Dx42TZaY.d.ts +809 -0
  99. package/dist/index-DyZ4QQf5.d.cts +456 -0
  100. package/dist/index-GFAVyOWO.d.ts +1427 -0
  101. package/dist/index-WHQLn0e8.d.cts +733 -0
  102. package/dist/index-ZtI1Iy4L.d.ts +1180 -0
  103. package/dist/index-eJgeni9c.d.cts +1911 -0
  104. package/dist/index-tk4lSYod.d.ts +1911 -0
  105. package/dist/index.cjs +72 -0
  106. package/dist/index.cjs.map +1 -0
  107. package/dist/index.d.cts +17 -0
  108. package/dist/index.d.ts +17 -0
  109. package/dist/index.js +15 -0
  110. package/dist/index.js.map +1 -0
  111. package/dist/linalg/index.cjs +86 -0
  112. package/dist/linalg/index.cjs.map +1 -0
  113. package/dist/linalg/index.d.cts +3 -0
  114. package/dist/linalg/index.d.ts +3 -0
  115. package/dist/linalg/index.js +5 -0
  116. package/dist/linalg/index.js.map +1 -0
  117. package/dist/metrics/index.cjs +158 -0
  118. package/dist/metrics/index.cjs.map +1 -0
  119. package/dist/metrics/index.d.cts +3 -0
  120. package/dist/metrics/index.d.ts +3 -0
  121. package/dist/metrics/index.js +5 -0
  122. package/dist/metrics/index.js.map +1 -0
  123. package/dist/ml/index.cjs +87 -0
  124. package/dist/ml/index.cjs.map +1 -0
  125. package/dist/ml/index.d.cts +3 -0
  126. package/dist/ml/index.d.ts +3 -0
  127. package/dist/ml/index.js +6 -0
  128. package/dist/ml/index.js.map +1 -0
  129. package/dist/ndarray/index.cjs +501 -0
  130. package/dist/ndarray/index.cjs.map +1 -0
  131. package/dist/ndarray/index.d.cts +5 -0
  132. package/dist/ndarray/index.d.ts +5 -0
  133. package/dist/ndarray/index.js +4 -0
  134. package/dist/ndarray/index.js.map +1 -0
  135. package/dist/nn/index.cjs +142 -0
  136. package/dist/nn/index.cjs.map +1 -0
  137. package/dist/nn/index.d.cts +6 -0
  138. package/dist/nn/index.d.ts +6 -0
  139. package/dist/nn/index.js +5 -0
  140. package/dist/nn/index.js.map +1 -0
  141. package/dist/optim/index.cjs +77 -0
  142. package/dist/optim/index.cjs.map +1 -0
  143. package/dist/optim/index.d.cts +4 -0
  144. package/dist/optim/index.d.ts +4 -0
  145. package/dist/optim/index.js +4 -0
  146. package/dist/optim/index.js.map +1 -0
  147. package/dist/plot/index.cjs +114 -0
  148. package/dist/plot/index.cjs.map +1 -0
  149. package/dist/plot/index.d.cts +6 -0
  150. package/dist/plot/index.d.ts +6 -0
  151. package/dist/plot/index.js +5 -0
  152. package/dist/plot/index.js.map +1 -0
  153. package/dist/preprocess/index.cjs +82 -0
  154. package/dist/preprocess/index.cjs.map +1 -0
  155. package/dist/preprocess/index.d.cts +4 -0
  156. package/dist/preprocess/index.d.ts +4 -0
  157. package/dist/preprocess/index.js +5 -0
  158. package/dist/preprocess/index.js.map +1 -0
  159. package/dist/random/index.cjs +74 -0
  160. package/dist/random/index.cjs.map +1 -0
  161. package/dist/random/index.d.cts +3 -0
  162. package/dist/random/index.d.ts +3 -0
  163. package/dist/random/index.js +5 -0
  164. package/dist/random/index.js.map +1 -0
  165. package/dist/stats/index.cjs +142 -0
  166. package/dist/stats/index.cjs.map +1 -0
  167. package/dist/stats/index.d.cts +3 -0
  168. package/dist/stats/index.d.ts +3 -0
  169. package/dist/stats/index.js +5 -0
  170. package/dist/stats/index.js.map +1 -0
  171. package/dist/tensor-B96jjJLQ.d.cts +205 -0
  172. package/dist/tensor-B96jjJLQ.d.ts +205 -0
  173. package/package.json +226 -0
@@ -0,0 +1,242 @@
1
+ import { A as Axis, D as DType, S as Shape, b as TypedArray } from './tensor-B96jjJLQ.js';
2
+ import { T as Tensor } from './Tensor-g8mUClel.js';
3
+
4
+ type SliceRange = number | {
5
+ readonly start?: number;
6
+ readonly end?: number;
7
+ readonly step?: number;
8
+ };
9
+ /**
10
+ * Slice a tensor.
11
+ *
12
+ * Examples:
13
+ * - `slice(t, { start: 0, end: 2 })` on a 1D tensor keeps the first 2 elements.
14
+ * - `slice(t, 0, { start: 1 })` on a 2D tensor selects row 0 and columns from 1.
15
+ */
16
+ declare function slice(t: Tensor, ...ranges: SliceRange[]): Tensor;
17
+ /**
18
+ * Gather values along an axis specified by indices.
19
+ *
20
+ * @param t - Input tensor
21
+ * @param indices - Indices to gather
22
+ * @param axis - Axis along which to gather
23
+ * @returns Gathered tensor
24
+ *
25
+ * @example
26
+ * ```ts
27
+ * const t = tensor([[1, 2], [3, 4], [5, 6]]);
28
+ * const indices = tensor([0, 2]);
29
+ * const result = gather(t, indices, 0); // [[1, 2], [5, 6]]
30
+ * ```
31
+ */
32
+ declare function gather(t: Tensor, indices: Tensor, axis: Axis): Tensor;
33
+
34
+ /**
35
+ * Autograd module for automatic differentiation.
36
+ *
37
+ * Implements reverse-mode automatic differentiation (backpropagation)
38
+ * for `Tensor` operations.
39
+ *
40
+ * ## Gradient state
41
+ *
42
+ * A **module-level singleton** `gradEnabled` controls whether new
43
+ * operations record their backward graph. Use {@link noGrad} to
44
+ * temporarily disable gradient tracking (e.g. during inference).
45
+ * `noGrad` only accepts **synchronous** callbacks — passing an async
46
+ * function will throw, because the flag would be restored before the
47
+ * async work completes.
48
+ *
49
+ * ## max / min backward — tie-breaking
50
+ *
51
+ * When multiple elements share the maximum (or minimum) value along the
52
+ * reduced axis, **all** tied positions receive gradient. This means the
53
+ * gradient is *not* divided among ties — each tied element gets the full
54
+ * upstream gradient. This matches PyTorch's behaviour and avoids the
55
+ * cost of counting ties, but callers should be aware that the
56
+ * "effective" gradient magnitude is multiplied by the tie count.
57
+ */
58
+
59
+ type GradTensorOptions = {
60
+ readonly requiresGrad?: boolean;
61
+ readonly dtype?: Exclude<DType, "string">;
62
+ };
63
+ type BackwardFn = () => void;
64
+ /**
65
+ * Tensor wrapper that records a computation graph for reverse-mode autodiff.
66
+ */
67
+ declare class GradTensor {
68
+ readonly tensor: Tensor;
69
+ requiresGrad: boolean;
70
+ private _grad;
71
+ private readonly _prev;
72
+ private readonly _backward;
73
+ private constructor();
74
+ static create(args: {
75
+ readonly tensor: Tensor;
76
+ readonly requiresGrad: boolean;
77
+ readonly prev: readonly GradTensor[];
78
+ readonly backward: BackwardFn;
79
+ }): GradTensor;
80
+ static fromTensor(t: Tensor, options?: GradTensorOptions): GradTensor;
81
+ static scalar(value: number, options?: GradTensorOptions): GradTensor;
82
+ /**
83
+ * Get the shape of the underlying tensor.
84
+ * Implements TensorLike interface for compatibility with Tensor.
85
+ */
86
+ get shape(): Shape;
87
+ /**
88
+ * Get the total number of elements.
89
+ * Implements TensorLike interface for compatibility with Tensor.
90
+ */
91
+ get size(): number;
92
+ /**
93
+ * Get the number of dimensions.
94
+ * Implements TensorLike interface for compatibility with Tensor.
95
+ */
96
+ get ndim(): number;
97
+ /**
98
+ * Get the data type of the underlying tensor.
99
+ * Implements TensorLike interface for compatibility with Tensor.
100
+ */
101
+ get dtype(): DType;
102
+ /**
103
+ * Get the device where the tensor resides.
104
+ * Implements TensorLike interface for compatibility with Tensor.
105
+ */
106
+ get device(): Tensor["device"];
107
+ /**
108
+ * Get the memory strides of the underlying tensor.
109
+ * Implements TensorLike interface for compatibility with Tensor.
110
+ */
111
+ get strides(): readonly number[];
112
+ /**
113
+ * Get the offset into the underlying data buffer.
114
+ * Implements TensorLike interface for compatibility with Tensor.
115
+ */
116
+ get offset(): number;
117
+ /**
118
+ * Get the underlying data buffer.
119
+ * Implements TensorLike interface for compatibility with Tensor.
120
+ */
121
+ get data(): TypedArray;
122
+ /**
123
+ * Get the accumulated gradient for this tensor.
124
+ * Returns null if no gradient has been computed yet.
125
+ */
126
+ get grad(): Tensor | null;
127
+ setGrad(grad: Tensor): void;
128
+ zeroGrad(): void;
129
+ detach(): GradTensor;
130
+ setRequiresGrad(value: boolean): void;
131
+ hasGrad(): boolean;
132
+ /** @internal */
133
+ accumulateGrad(grad: Tensor): void;
134
+ /**
135
+ * Backpropagate gradients from this node through the recorded graph.
136
+ */
137
+ backward(grad?: Tensor): void;
138
+ add(other: GradTensor): GradTensor;
139
+ sub(other: GradTensor): GradTensor;
140
+ mul(other: GradTensor): GradTensor;
141
+ neg(): GradTensor;
142
+ sum(axis?: Axis, keepdims?: boolean): GradTensor;
143
+ div(other: GradTensor): GradTensor;
144
+ pow(exponent: number): GradTensor;
145
+ sqrt(): GradTensor;
146
+ matmul(other: GradTensor): GradTensor;
147
+ relu(): GradTensor;
148
+ sigmoid(): GradTensor;
149
+ square(): GradTensor;
150
+ exp(): GradTensor;
151
+ log(): GradTensor;
152
+ tanh(): GradTensor;
153
+ slice(...args: SliceRange[]): GradTensor;
154
+ gather(indices: GradTensor, axis: Axis): GradTensor;
155
+ mean(axis?: Axis, keepdims?: boolean): GradTensor;
156
+ max(axis?: Axis, keepdims?: boolean): GradTensor;
157
+ /**
158
+ * Reshape the GradTensor to a new shape without copying data.
159
+ *
160
+ * Returns a new GradTensor with the specified shape. The underlying tensor
161
+ * is reshaped, and gradient computation is preserved through the reshape operation.
162
+ *
163
+ * @param newShape - The desired shape for the tensor
164
+ * @returns A new GradTensor with the specified shape
165
+ * @throws {ShapeError} If the new shape is incompatible with the tensor's size
166
+ *
167
+ * @example
168
+ * ```ts
169
+ * const t = parameter([1, 2, 3, 4, 5, 6]);
170
+ * const reshaped = t.reshape([2, 3]);
171
+ * console.log(reshaped.shape); // [2, 3]
172
+ * ```
173
+ */
174
+ reshape(newShape: Shape): GradTensor;
175
+ /**
176
+ * Flatten the GradTensor to a 1-dimensional array.
177
+ *
178
+ * Returns a new 1D GradTensor containing all elements.
179
+ *
180
+ * @returns A 1D GradTensor with shape [size]
181
+ *
182
+ * @example
183
+ * ```ts
184
+ * const matrix = parameter([[1, 2, 3], [4, 5, 6]]);
185
+ * const flat = matrix.flatten();
186
+ * console.log(flat.shape); // [6]
187
+ * ```
188
+ */
189
+ flatten(): GradTensor;
190
+ /**
191
+ * Create a view of the GradTensor with a different shape.
192
+ *
193
+ * Similar to reshape but uses the underlying tensor's view method.
194
+ *
195
+ * @param shape - The desired shape for the view
196
+ * @param strides - Optional custom strides
197
+ * @param offset - Optional offset into the data buffer
198
+ * @returns A new GradTensor view with the specified shape
199
+ */
200
+ view(shape: Shape, strides?: readonly number[], offset?: number): GradTensor;
201
+ transpose(axes?: readonly number[]): GradTensor;
202
+ min(axis?: Axis, keepdims?: boolean): GradTensor;
203
+ abs(): GradTensor;
204
+ clip(minVal: number, maxVal: number): GradTensor;
205
+ leakyRelu(negativeSlope?: number): GradTensor;
206
+ elu(alpha?: number): GradTensor;
207
+ gelu(): GradTensor;
208
+ /**
209
+ * Return a human-readable string representation of this GradTensor.
210
+ *
211
+ * Delegates to the underlying {@link Tensor.toString} and appends
212
+ * gradient metadata.
213
+ *
214
+ * @param maxElements - Maximum elements per dimension before summarizing (default: 6).
215
+ * @returns Formatted string representation
216
+ */
217
+ toString(maxElements?: number): string;
218
+ }
219
+ /**
220
+ * Create a GradTensor with requiresGrad=true.
221
+ */
222
+ declare function parameter(data: number | number[] | number[][] | number[][][] | Tensor, options?: GradTensorOptions): GradTensor;
223
+ /**
224
+ * Context manager to disable gradient calculation.
225
+ *
226
+ * **Important:** The callback must be synchronous. Passing an async function
227
+ * will cause `gradEnabled` to be restored before the awaited work finishes,
228
+ * silently breaking gradient tracking inside the async continuation.
229
+ *
230
+ * @throws {DeepboxError} If the callback returns a Promise (async function detected)
231
+ */
232
+ declare function noGrad<T>(fn: () => T): T;
233
+ /**
234
+ * Image to Column operation for GradTensor.
235
+ */
236
+ declare function im2col(input: GradTensor, kernelSize: [number, number], stride: [number, number], padding: [number, number]): GradTensor;
237
+ declare function softmax(input: GradTensor, axis?: number): GradTensor;
238
+ declare function logSoftmax(input: GradTensor, axis?: number): GradTensor;
239
+ declare function variance(input: GradTensor, axis?: number, correction?: number): GradTensor;
240
+ declare function dropout(input: GradTensor, p?: number, training?: boolean): GradTensor;
241
+
242
+ export { GradTensor as G, type SliceRange as S, type GradTensorOptions as a, softmax as b, dropout as d, gather as g, im2col as i, logSoftmax as l, noGrad as n, parameter as p, slice as s, variance as v };