deepbox 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +344 -0
- package/dist/CSRMatrix-CwGwQRea.d.cts +219 -0
- package/dist/CSRMatrix-KzNt6QpS.d.ts +219 -0
- package/dist/Tensor-BQLk1ltW.d.cts +147 -0
- package/dist/Tensor-g8mUClel.d.ts +147 -0
- package/dist/chunk-4S73VUBD.js +677 -0
- package/dist/chunk-4S73VUBD.js.map +1 -0
- package/dist/chunk-5R4S63PF.js +2925 -0
- package/dist/chunk-5R4S63PF.js.map +1 -0
- package/dist/chunk-6AE5FKKQ.cjs +9264 -0
- package/dist/chunk-6AE5FKKQ.cjs.map +1 -0
- package/dist/chunk-AD436M45.js +3854 -0
- package/dist/chunk-AD436M45.js.map +1 -0
- package/dist/chunk-ALS7ETWZ.cjs +4263 -0
- package/dist/chunk-ALS7ETWZ.cjs.map +1 -0
- package/dist/chunk-AU7XHGKJ.js +2092 -0
- package/dist/chunk-AU7XHGKJ.js.map +1 -0
- package/dist/chunk-B5TNKUEY.js +1481 -0
- package/dist/chunk-B5TNKUEY.js.map +1 -0
- package/dist/chunk-BCR7G3A6.js +9136 -0
- package/dist/chunk-BCR7G3A6.js.map +1 -0
- package/dist/chunk-C4PKXY74.cjs +1917 -0
- package/dist/chunk-C4PKXY74.cjs.map +1 -0
- package/dist/chunk-DWZY6PIP.cjs +6400 -0
- package/dist/chunk-DWZY6PIP.cjs.map +1 -0
- package/dist/chunk-E3EU5FZO.cjs +2113 -0
- package/dist/chunk-E3EU5FZO.cjs.map +1 -0
- package/dist/chunk-F3JWBINJ.js +1054 -0
- package/dist/chunk-F3JWBINJ.js.map +1 -0
- package/dist/chunk-FJYLIGJX.js +1940 -0
- package/dist/chunk-FJYLIGJX.js.map +1 -0
- package/dist/chunk-JSCDE774.cjs +729 -0
- package/dist/chunk-JSCDE774.cjs.map +1 -0
- package/dist/chunk-LWECRCW2.cjs +2412 -0
- package/dist/chunk-LWECRCW2.cjs.map +1 -0
- package/dist/chunk-MLBMYKCG.js +6379 -0
- package/dist/chunk-MLBMYKCG.js.map +1 -0
- package/dist/chunk-OX6QXFMV.cjs +3874 -0
- package/dist/chunk-OX6QXFMV.cjs.map +1 -0
- package/dist/chunk-PHV2DKRS.cjs +1072 -0
- package/dist/chunk-PHV2DKRS.cjs.map +1 -0
- package/dist/chunk-PL7TAYKI.js +4056 -0
- package/dist/chunk-PL7TAYKI.js.map +1 -0
- package/dist/chunk-PR647I7R.js +1898 -0
- package/dist/chunk-PR647I7R.js.map +1 -0
- package/dist/chunk-QERHVCHC.cjs +2960 -0
- package/dist/chunk-QERHVCHC.cjs.map +1 -0
- package/dist/chunk-XEG44RF6.cjs +1514 -0
- package/dist/chunk-XEG44RF6.cjs.map +1 -0
- package/dist/chunk-XMWVME2W.js +2377 -0
- package/dist/chunk-XMWVME2W.js.map +1 -0
- package/dist/chunk-ZB75FESB.cjs +1979 -0
- package/dist/chunk-ZB75FESB.cjs.map +1 -0
- package/dist/chunk-ZLW62TJG.cjs +4061 -0
- package/dist/chunk-ZLW62TJG.cjs.map +1 -0
- package/dist/chunk-ZXKBDFP3.js +4235 -0
- package/dist/chunk-ZXKBDFP3.js.map +1 -0
- package/dist/core/index.cjs +204 -0
- package/dist/core/index.cjs.map +1 -0
- package/dist/core/index.d.cts +2 -0
- package/dist/core/index.d.ts +2 -0
- package/dist/core/index.js +3 -0
- package/dist/core/index.js.map +1 -0
- package/dist/dataframe/index.cjs +22 -0
- package/dist/dataframe/index.cjs.map +1 -0
- package/dist/dataframe/index.d.cts +3 -0
- package/dist/dataframe/index.d.ts +3 -0
- package/dist/dataframe/index.js +5 -0
- package/dist/dataframe/index.js.map +1 -0
- package/dist/datasets/index.cjs +134 -0
- package/dist/datasets/index.cjs.map +1 -0
- package/dist/datasets/index.d.cts +3 -0
- package/dist/datasets/index.d.ts +3 -0
- package/dist/datasets/index.js +5 -0
- package/dist/datasets/index.js.map +1 -0
- package/dist/index-74AB8Cyh.d.cts +1126 -0
- package/dist/index-9oQx1HgV.d.cts +1180 -0
- package/dist/index-BJY2SI4i.d.ts +483 -0
- package/dist/index-BWGhrDlr.d.ts +733 -0
- package/dist/index-B_DK4FKY.d.cts +242 -0
- package/dist/index-BbA2Gxfl.d.ts +456 -0
- package/dist/index-BgHYAoSS.d.cts +837 -0
- package/dist/index-BndMbqsM.d.ts +1439 -0
- package/dist/index-C1mfVYoo.d.ts +2517 -0
- package/dist/index-CCvlwAmL.d.cts +809 -0
- package/dist/index-CDw5CnOU.d.ts +785 -0
- package/dist/index-Cn3SdB0O.d.ts +1126 -0
- package/dist/index-CrqLlS-a.d.ts +776 -0
- package/dist/index-D61yaSMY.d.cts +483 -0
- package/dist/index-D9Loo1_A.d.cts +2517 -0
- package/dist/index-DIT_OO9C.d.cts +785 -0
- package/dist/index-DIp_RrRt.d.ts +242 -0
- package/dist/index-DbultU6X.d.cts +1427 -0
- package/dist/index-DmEg_LCm.d.cts +776 -0
- package/dist/index-DoPWVxPo.d.cts +1439 -0
- package/dist/index-DuCxd-8d.d.ts +837 -0
- package/dist/index-Dx42TZaY.d.ts +809 -0
- package/dist/index-DyZ4QQf5.d.cts +456 -0
- package/dist/index-GFAVyOWO.d.ts +1427 -0
- package/dist/index-WHQLn0e8.d.cts +733 -0
- package/dist/index-ZtI1Iy4L.d.ts +1180 -0
- package/dist/index-eJgeni9c.d.cts +1911 -0
- package/dist/index-tk4lSYod.d.ts +1911 -0
- package/dist/index.cjs +72 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +17 -0
- package/dist/index.d.ts +17 -0
- package/dist/index.js +15 -0
- package/dist/index.js.map +1 -0
- package/dist/linalg/index.cjs +86 -0
- package/dist/linalg/index.cjs.map +1 -0
- package/dist/linalg/index.d.cts +3 -0
- package/dist/linalg/index.d.ts +3 -0
- package/dist/linalg/index.js +5 -0
- package/dist/linalg/index.js.map +1 -0
- package/dist/metrics/index.cjs +158 -0
- package/dist/metrics/index.cjs.map +1 -0
- package/dist/metrics/index.d.cts +3 -0
- package/dist/metrics/index.d.ts +3 -0
- package/dist/metrics/index.js +5 -0
- package/dist/metrics/index.js.map +1 -0
- package/dist/ml/index.cjs +87 -0
- package/dist/ml/index.cjs.map +1 -0
- package/dist/ml/index.d.cts +3 -0
- package/dist/ml/index.d.ts +3 -0
- package/dist/ml/index.js +6 -0
- package/dist/ml/index.js.map +1 -0
- package/dist/ndarray/index.cjs +501 -0
- package/dist/ndarray/index.cjs.map +1 -0
- package/dist/ndarray/index.d.cts +5 -0
- package/dist/ndarray/index.d.ts +5 -0
- package/dist/ndarray/index.js +4 -0
- package/dist/ndarray/index.js.map +1 -0
- package/dist/nn/index.cjs +142 -0
- package/dist/nn/index.cjs.map +1 -0
- package/dist/nn/index.d.cts +6 -0
- package/dist/nn/index.d.ts +6 -0
- package/dist/nn/index.js +5 -0
- package/dist/nn/index.js.map +1 -0
- package/dist/optim/index.cjs +77 -0
- package/dist/optim/index.cjs.map +1 -0
- package/dist/optim/index.d.cts +4 -0
- package/dist/optim/index.d.ts +4 -0
- package/dist/optim/index.js +4 -0
- package/dist/optim/index.js.map +1 -0
- package/dist/plot/index.cjs +114 -0
- package/dist/plot/index.cjs.map +1 -0
- package/dist/plot/index.d.cts +6 -0
- package/dist/plot/index.d.ts +6 -0
- package/dist/plot/index.js +5 -0
- package/dist/plot/index.js.map +1 -0
- package/dist/preprocess/index.cjs +82 -0
- package/dist/preprocess/index.cjs.map +1 -0
- package/dist/preprocess/index.d.cts +4 -0
- package/dist/preprocess/index.d.ts +4 -0
- package/dist/preprocess/index.js +5 -0
- package/dist/preprocess/index.js.map +1 -0
- package/dist/random/index.cjs +74 -0
- package/dist/random/index.cjs.map +1 -0
- package/dist/random/index.d.cts +3 -0
- package/dist/random/index.d.ts +3 -0
- package/dist/random/index.js +5 -0
- package/dist/random/index.js.map +1 -0
- package/dist/stats/index.cjs +142 -0
- package/dist/stats/index.cjs.map +1 -0
- package/dist/stats/index.d.cts +3 -0
- package/dist/stats/index.d.ts +3 -0
- package/dist/stats/index.js +5 -0
- package/dist/stats/index.js.map +1 -0
- package/dist/tensor-B96jjJLQ.d.cts +205 -0
- package/dist/tensor-B96jjJLQ.d.ts +205 -0
- package/package.json +226 -0
|
@@ -0,0 +1,242 @@
|
|
|
1
|
+
import { A as Axis, D as DType, S as Shape, b as TypedArray } from './tensor-B96jjJLQ.js';
|
|
2
|
+
import { T as Tensor } from './Tensor-g8mUClel.js';
|
|
3
|
+
|
|
4
|
+
/**
 * A slicing specifier for a single tensor dimension: either an integer
 * index (selects one position along that dimension), or a range object
 * with optional `start`, `end`, and `step` bounds.
 */
type SliceRange = number | {
    readonly start?: number;
    readonly end?: number;
    readonly step?: number;
};
|
|
9
|
+
/**
 * Slice a tensor.
 *
 * Each entry of `ranges` applies positionally to one dimension of `t`:
 * a number picks a single index, a range object selects a sub-range
 * (as the examples below show).
 *
 * Examples:
 * - `slice(t, { start: 0, end: 2 })` on a 1D tensor keeps the first 2 elements.
 * - `slice(t, 0, { start: 1 })` on a 2D tensor selects row 0 and columns from 1.
 *
 * @param t - Input tensor
 * @param ranges - One {@link SliceRange} per leading dimension
 * @returns The sliced tensor
 */
declare function slice(t: Tensor, ...ranges: SliceRange[]): Tensor;
|
|
17
|
+
/**
 * Gather values along an axis specified by indices.
 *
 * @param t - Input tensor
 * @param indices - Indices to gather
 * @param axis - Axis along which to gather
 * @returns Gathered tensor
 *
 * @example
 * ```ts
 * const t = tensor([[1, 2], [3, 4], [5, 6]]);
 * const indices = tensor([0, 2]);
 * const result = gather(t, indices, 0); // [[1, 2], [5, 6]]
 * ```
 */
declare function gather(t: Tensor, indices: Tensor, axis: Axis): Tensor;
|
|
33
|
+
|
|
34
|
+
/**
 * Autograd module for automatic differentiation.
 *
 * Implements reverse-mode automatic differentiation (backpropagation)
 * for `Tensor` operations.
 *
 * ## Gradient state
 *
 * A **module-level singleton** `gradEnabled` controls whether new
 * operations record their backward graph. Use {@link noGrad} to
 * temporarily disable gradient tracking (e.g. during inference).
 * `noGrad` only accepts **synchronous** callbacks — passing an async
 * function will throw, because the flag would be restored before the
 * async work completes.
 *
 * ## max / min backward — tie-breaking
 *
 * When multiple elements share the maximum (or minimum) value along the
 * reduced axis, **all** tied positions receive gradient. This means the
 * gradient is *not* divided among ties — each tied element gets the full
 * upstream gradient. This matches PyTorch's behaviour and avoids the
 * cost of counting ties, but callers should be aware that the
 * "effective" gradient magnitude is multiplied by the tie count.
 */
|
|
58
|
+
|
|
59
|
+
/**
 * Options accepted by the {@link GradTensor} factory functions.
 *
 * - `requiresGrad` — whether the new tensor participates in gradient tracking.
 * - `dtype` — any {@link DType} except `"string"` (string tensors are
 *   excluded from autograd by this type).
 */
type GradTensorOptions = {
    readonly requiresGrad?: boolean;
    readonly dtype?: Exclude<DType, "string">;
};
|
|
63
|
+
/**
 * Zero-argument callback recorded per graph node; invoked during the
 * backward pass to propagate gradient to the node's inputs.
 */
type BackwardFn = () => void;
|
|
64
|
+
/**
 * Tensor wrapper that records a computation graph for reverse-mode autodiff.
 *
 * Each differentiable operation returns a new GradTensor node whose
 * recorded {@link BackwardFn} routes upstream gradient to its inputs;
 * calling {@link GradTensor.backward} walks that graph in reverse.
 */
declare class GradTensor {
    /** The wrapped forward-pass value. */
    readonly tensor: Tensor;
    /** Whether this node participates in gradient tracking. */
    requiresGrad: boolean;
    // Accumulated gradient; per `get grad()`, null until one is computed.
    private _grad;
    // Input nodes of the op that produced this tensor (graph edges).
    private readonly _prev;
    // Backward callback recorded by the producing op.
    private readonly _backward;
    // Instances are built through the static factories below.
    private constructor();
    /**
     * Construct a graph node from an op's result tensor, its input nodes
     * (`prev`), and the backward callback that distributes gradient to them.
     */
    static create(args: {
        readonly tensor: Tensor;
        readonly requiresGrad: boolean;
        readonly prev: readonly GradTensor[];
        readonly backward: BackwardFn;
    }): GradTensor;
    /** Wrap an existing Tensor as a leaf node. */
    static fromTensor(t: Tensor, options?: GradTensorOptions): GradTensor;
    /** Wrap a single number as a scalar leaf node. */
    static scalar(value: number, options?: GradTensorOptions): GradTensor;
    /**
     * Get the shape of the underlying tensor.
     * Implements TensorLike interface for compatibility with Tensor.
     */
    get shape(): Shape;
    /**
     * Get the total number of elements.
     * Implements TensorLike interface for compatibility with Tensor.
     */
    get size(): number;
    /**
     * Get the number of dimensions.
     * Implements TensorLike interface for compatibility with Tensor.
     */
    get ndim(): number;
    /**
     * Get the data type of the underlying tensor.
     * Implements TensorLike interface for compatibility with Tensor.
     */
    get dtype(): DType;
    /**
     * Get the device where the tensor resides.
     * Implements TensorLike interface for compatibility with Tensor.
     */
    get device(): Tensor["device"];
    /**
     * Get the memory strides of the underlying tensor.
     * Implements TensorLike interface for compatibility with Tensor.
     */
    get strides(): readonly number[];
    /**
     * Get the offset into the underlying data buffer.
     * Implements TensorLike interface for compatibility with Tensor.
     */
    get offset(): number;
    /**
     * Get the underlying data buffer.
     * Implements TensorLike interface for compatibility with Tensor.
     */
    get data(): TypedArray;
    /**
     * Get the accumulated gradient for this tensor.
     * Returns null if no gradient has been computed yet.
     */
    get grad(): Tensor | null;
    /** Replace the stored gradient with `grad`. */
    setGrad(grad: Tensor): void;
    /** Reset the accumulated gradient. NOTE(review): confirm whether this nulls or zero-fills. */
    zeroGrad(): void;
    /** Return a node holding the same tensor, detached from the recorded graph. */
    detach(): GradTensor;
    /** Enable or disable gradient tracking for this node. */
    setRequiresGrad(value: boolean): void;
    /** Whether a gradient has been accumulated on this node. */
    hasGrad(): boolean;
    /** @internal */
    accumulateGrad(grad: Tensor): void;
    /**
     * Backpropagate gradients from this node through the recorded graph.
     *
     * @param grad - Optional upstream gradient seed — presumably defaults to
     *   ones when omitted; TODO confirm against the implementation.
     */
    backward(grad?: Tensor): void;
    // --- Differentiable operations: each returns a new graph node. ---
    add(other: GradTensor): GradTensor;
    sub(other: GradTensor): GradTensor;
    mul(other: GradTensor): GradTensor;
    neg(): GradTensor;
    sum(axis?: Axis, keepdims?: boolean): GradTensor;
    div(other: GradTensor): GradTensor;
    pow(exponent: number): GradTensor;
    sqrt(): GradTensor;
    matmul(other: GradTensor): GradTensor;
    relu(): GradTensor;
    sigmoid(): GradTensor;
    square(): GradTensor;
    exp(): GradTensor;
    log(): GradTensor;
    tanh(): GradTensor;
    slice(...args: SliceRange[]): GradTensor;
    gather(indices: GradTensor, axis: Axis): GradTensor;
    mean(axis?: Axis, keepdims?: boolean): GradTensor;
    // max/min: see the module-level notes — all tied positions receive the
    // full upstream gradient.
    max(axis?: Axis, keepdims?: boolean): GradTensor;
    /**
     * Reshape the GradTensor to a new shape without copying data.
     *
     * Returns a new GradTensor with the specified shape. The underlying tensor
     * is reshaped, and gradient computation is preserved through the reshape operation.
     *
     * @param newShape - The desired shape for the tensor
     * @returns A new GradTensor with the specified shape
     * @throws {ShapeError} If the new shape is incompatible with the tensor's size
     *
     * @example
     * ```ts
     * const t = parameter([1, 2, 3, 4, 5, 6]);
     * const reshaped = t.reshape([2, 3]);
     * console.log(reshaped.shape); // [2, 3]
     * ```
     */
    reshape(newShape: Shape): GradTensor;
    /**
     * Flatten the GradTensor to a 1-dimensional array.
     *
     * Returns a new 1D GradTensor containing all elements.
     *
     * @returns A 1D GradTensor with shape [size]
     *
     * @example
     * ```ts
     * const matrix = parameter([[1, 2, 3], [4, 5, 6]]);
     * const flat = matrix.flatten();
     * console.log(flat.shape); // [6]
     * ```
     */
    flatten(): GradTensor;
    /**
     * Create a view of the GradTensor with a different shape.
     *
     * Similar to reshape but uses the underlying tensor's view method.
     *
     * @param shape - The desired shape for the view
     * @param strides - Optional custom strides
     * @param offset - Optional offset into the data buffer
     * @returns A new GradTensor view with the specified shape
     */
    view(shape: Shape, strides?: readonly number[], offset?: number): GradTensor;
    transpose(axes?: readonly number[]): GradTensor;
    min(axis?: Axis, keepdims?: boolean): GradTensor;
    abs(): GradTensor;
    clip(minVal: number, maxVal: number): GradTensor;
    leakyRelu(negativeSlope?: number): GradTensor;
    elu(alpha?: number): GradTensor;
    gelu(): GradTensor;
    /**
     * Return a human-readable string representation of this GradTensor.
     *
     * Delegates to the underlying {@link Tensor.toString} and appends
     * gradient metadata.
     *
     * @param maxElements - Maximum elements per dimension before summarizing (default: 6).
     * @returns Formatted string representation
     */
    toString(maxElements?: number): string;
}
|
|
219
|
+
/**
 * Create a GradTensor with requiresGrad=true.
 *
 * Accepts a scalar number, nested number arrays (1-D through 3-D), or an
 * existing Tensor. `options.requiresGrad` can still override the default.
 *
 * @param data - The initial value
 * @param options - Optional dtype / requiresGrad overrides
 * @returns A trainable GradTensor
 */
declare function parameter(data: number | number[] | number[][] | number[][][] | Tensor, options?: GradTensorOptions): GradTensor;
|
|
223
|
+
/**
 * Context manager to disable gradient calculation.
 *
 * Runs `fn` with gradient tracking turned off and returns its result.
 *
 * **Important:** The callback must be synchronous. Passing an async function
 * will cause `gradEnabled` to be restored before the awaited work finishes,
 * silently breaking gradient tracking inside the async continuation.
 *
 * @param fn - Synchronous callback to run without gradient tracking
 * @returns The value returned by `fn`
 * @throws {DeepboxError} If the callback returns a Promise (async function detected)
 */
declare function noGrad<T>(fn: () => T): T;
|
|
233
|
+
/**
 * Image to Column operation for GradTensor.
 *
 * @param input - Input image tensor (NOTE(review): layout not visible here —
 *   presumably NCHW; confirm against the implementation)
 * @param kernelSize - [number, number] sliding-window size — TODO confirm
 *   whether order is [height, width]
 * @param stride - [number, number] window step per axis
 * @param padding - [number, number] padding per axis
 * @returns Column-matrix GradTensor with gradient tracking preserved
 */
declare function im2col(input: GradTensor, kernelSize: [number, number], stride: [number, number], padding: [number, number]): GradTensor;
|
|
237
|
+
/** Differentiable softmax of `input` along `axis`. */
declare function softmax(input: GradTensor, axis?: number): GradTensor;
|
|
238
|
+
/** Differentiable log-softmax of `input` along `axis`. */
declare function logSoftmax(input: GradTensor, axis?: number): GradTensor;
|
|
239
|
+
/**
 * Differentiable variance of `input` along `axis`.
 * `correction` is presumably a degrees-of-freedom adjustment (Bessel-style);
 * TODO confirm its default against the implementation.
 */
declare function variance(input: GradTensor, axis?: number, correction?: number): GradTensor;
|
|
240
|
+
/**
 * Differentiable dropout. `p` is the drop probability and `training`
 * toggles whether dropping is applied — defaults not visible here;
 * TODO confirm against the implementation.
 */
declare function dropout(input: GradTensor, p?: number, training?: boolean): GradTensor;
|
|
241
|
+
|
|
242
|
+
// Public surface of this declaration chunk. The short aliases (G, S, a, b, …)
// are bundler-generated re-export names consumed by the package entry points.
export { GradTensor as G, type SliceRange as S, type GradTensorOptions as a, softmax as b, dropout as d, gather as g, im2col as i, logSoftmax as l, noGrad as n, parameter as p, slice as s, variance as v };
|