@milaboratories/pl-model-common 1.19.6 → 1.19.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/author_marker.d.ts +0 -1
- package/dist/base32_encode.cjs +56 -0
- package/dist/base32_encode.cjs.map +1 -0
- package/dist/base32_encode.d.ts +0 -1
- package/dist/base32_encode.js +54 -0
- package/dist/base32_encode.js.map +1 -0
- package/dist/block_state.d.ts +0 -1
- package/dist/bmodel/block_config.cjs +15 -0
- package/dist/bmodel/block_config.cjs.map +1 -0
- package/dist/bmodel/block_config.d.ts +0 -1
- package/dist/bmodel/block_config.js +13 -0
- package/dist/bmodel/block_config.js.map +1 -0
- package/dist/bmodel/code.d.ts +0 -1
- package/dist/bmodel/container.d.ts +0 -1
- package/dist/bmodel/index.d.ts +0 -1
- package/dist/bmodel/normalization.cjs +86 -0
- package/dist/bmodel/normalization.cjs.map +1 -0
- package/dist/bmodel/normalization.d.ts +0 -1
- package/dist/bmodel/normalization.js +84 -0
- package/dist/bmodel/normalization.js.map +1 -0
- package/dist/bmodel/types.d.ts +0 -1
- package/dist/branding.d.ts +0 -1
- package/dist/common_types.d.ts +0 -1
- package/dist/driver_kit.d.ts +0 -1
- package/dist/drivers/blob.cjs +27 -0
- package/dist/drivers/blob.cjs.map +1 -0
- package/dist/drivers/blob.d.ts +0 -1
- package/dist/drivers/blob.js +23 -0
- package/dist/drivers/blob.js.map +1 -0
- package/dist/drivers/index.d.ts +0 -1
- package/dist/drivers/interfaces.d.ts +0 -1
- package/dist/drivers/log.cjs +9 -0
- package/dist/drivers/log.cjs.map +1 -0
- package/dist/drivers/log.d.ts +0 -1
- package/dist/drivers/log.js +7 -0
- package/dist/drivers/log.js.map +1 -0
- package/dist/drivers/ls.cjs +39 -0
- package/dist/drivers/ls.cjs.map +1 -0
- package/dist/drivers/ls.d.ts +0 -1
- package/dist/drivers/ls.js +34 -0
- package/dist/drivers/ls.js.map +1 -0
- package/dist/drivers/pframe/column_filter.d.ts +0 -1
- package/dist/drivers/pframe/data_info.cjs +275 -0
- package/dist/drivers/pframe/data_info.cjs.map +1 -0
- package/dist/drivers/pframe/data_info.d.ts +0 -1
- package/dist/drivers/pframe/data_info.js +266 -0
- package/dist/drivers/pframe/data_info.js.map +1 -0
- package/dist/drivers/pframe/data_types.cjs +91 -0
- package/dist/drivers/pframe/data_types.cjs.map +1 -0
- package/dist/drivers/pframe/data_types.d.ts +0 -1
- package/dist/drivers/pframe/data_types.js +83 -0
- package/dist/drivers/pframe/data_types.js.map +1 -0
- package/dist/drivers/pframe/driver.d.ts +0 -1
- package/dist/drivers/pframe/find_columns.d.ts +0 -1
- package/dist/drivers/pframe/index.d.ts +0 -1
- package/dist/drivers/pframe/linker_columns.cjs +218 -0
- package/dist/drivers/pframe/linker_columns.cjs.map +1 -0
- package/dist/drivers/pframe/linker_columns.d.ts +0 -1
- package/dist/drivers/pframe/linker_columns.js +216 -0
- package/dist/drivers/pframe/linker_columns.js.map +1 -0
- package/dist/drivers/pframe/pframe.d.ts +0 -1
- package/dist/drivers/pframe/spec/anchored.cjs +234 -0
- package/dist/drivers/pframe/spec/anchored.cjs.map +1 -0
- package/dist/drivers/pframe/spec/anchored.d.ts +0 -1
- package/dist/drivers/pframe/spec/anchored.js +231 -0
- package/dist/drivers/pframe/spec/anchored.js.map +1 -0
- package/dist/drivers/pframe/spec/filtered_column.cjs +13 -0
- package/dist/drivers/pframe/spec/filtered_column.cjs.map +1 -0
- package/dist/drivers/pframe/spec/filtered_column.d.ts +0 -1
- package/dist/drivers/pframe/spec/filtered_column.js +11 -0
- package/dist/drivers/pframe/spec/filtered_column.js.map +1 -0
- package/dist/drivers/pframe/spec/ids.cjs +24 -0
- package/dist/drivers/pframe/spec/ids.cjs.map +1 -0
- package/dist/drivers/pframe/spec/ids.d.ts +0 -1
- package/dist/drivers/pframe/spec/ids.js +21 -0
- package/dist/drivers/pframe/spec/ids.js.map +1 -0
- package/dist/drivers/pframe/spec/index.d.ts +0 -1
- package/dist/drivers/pframe/spec/native_id.cjs +20 -0
- package/dist/drivers/pframe/spec/native_id.cjs.map +1 -0
- package/dist/drivers/pframe/spec/native_id.d.ts +0 -1
- package/dist/drivers/pframe/spec/native_id.js +18 -0
- package/dist/drivers/pframe/spec/native_id.js.map +1 -0
- package/dist/drivers/pframe/spec/selectors.cjs +120 -0
- package/dist/drivers/pframe/spec/selectors.cjs.map +1 -0
- package/dist/drivers/pframe/spec/selectors.d.ts +0 -1
- package/dist/drivers/pframe/spec/selectors.js +116 -0
- package/dist/drivers/pframe/spec/selectors.js.map +1 -0
- package/dist/drivers/pframe/spec/spec.cjs +361 -0
- package/dist/drivers/pframe/spec/spec.cjs.map +1 -0
- package/dist/drivers/pframe/spec/spec.d.ts +0 -1
- package/dist/drivers/pframe/spec/spec.js +332 -0
- package/dist/drivers/pframe/spec/spec.js.map +1 -0
- package/dist/drivers/pframe/table.d.ts +0 -1
- package/dist/drivers/pframe/table_calculate.cjs +43 -0
- package/dist/drivers/pframe/table_calculate.cjs.map +1 -0
- package/dist/drivers/pframe/table_calculate.d.ts +0 -1
- package/dist/drivers/pframe/table_calculate.js +40 -0
- package/dist/drivers/pframe/table_calculate.js.map +1 -0
- package/dist/drivers/pframe/table_common.cjs +19 -0
- package/dist/drivers/pframe/table_common.cjs.map +1 -0
- package/dist/drivers/pframe/table_common.d.ts +0 -1
- package/dist/drivers/pframe/table_common.js +17 -0
- package/dist/drivers/pframe/table_common.js.map +1 -0
- package/dist/drivers/pframe/type_util.d.ts +0 -1
- package/dist/drivers/pframe/unique_values.d.ts +0 -1
- package/dist/drivers/upload.d.ts +0 -1
- package/dist/drivers/urls.cjs +14 -0
- package/dist/drivers/urls.cjs.map +1 -0
- package/dist/drivers/urls.d.ts +0 -1
- package/dist/drivers/urls.js +11 -0
- package/dist/drivers/urls.js.map +1 -0
- package/dist/errors.cjs +141 -0
- package/dist/errors.cjs.map +1 -0
- package/dist/errors.d.ts +0 -1
- package/dist/errors.js +121 -0
- package/dist/errors.js.map +1 -0
- package/dist/flags/block_flags.cjs +8 -0
- package/dist/flags/block_flags.cjs.map +1 -0
- package/dist/flags/block_flags.d.ts +0 -1
- package/dist/flags/block_flags.js +5 -0
- package/dist/flags/block_flags.js.map +1 -0
- package/dist/flags/flag_utils.cjs +100 -0
- package/dist/flags/flag_utils.cjs.map +1 -0
- package/dist/flags/flag_utils.d.ts +0 -1
- package/dist/flags/flag_utils.js +94 -0
- package/dist/flags/flag_utils.js.map +1 -0
- package/dist/flags/index.d.ts +0 -1
- package/dist/flags/type_utils.d.ts +0 -1
- package/dist/index.cjs +151 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.ts +10 -12
- package/dist/index.js +27 -1
- package/dist/index.js.map +1 -1
- package/dist/json.cjs +18 -0
- package/dist/json.cjs.map +1 -0
- package/dist/json.d.ts +0 -1
- package/dist/json.js +14 -0
- package/dist/json.js.map +1 -0
- package/dist/navigation.cjs +6 -0
- package/dist/navigation.cjs.map +1 -0
- package/dist/navigation.d.ts +0 -1
- package/dist/navigation.js +4 -0
- package/dist/navigation.js.map +1 -0
- package/dist/plid.cjs +37 -0
- package/dist/plid.cjs.map +1 -0
- package/dist/plid.d.ts +0 -1
- package/dist/plid.js +30 -0
- package/dist/plid.js.map +1 -0
- package/dist/pool/entry.d.ts +0 -1
- package/dist/pool/index.d.ts +0 -1
- package/dist/pool/query.cjs +49 -0
- package/dist/pool/query.cjs.map +1 -0
- package/dist/pool/query.d.ts +0 -1
- package/dist/pool/query.js +47 -0
- package/dist/pool/query.js.map +1 -0
- package/dist/pool/spec.cjs +67 -0
- package/dist/pool/spec.cjs.map +1 -0
- package/dist/pool/spec.d.ts +0 -1
- package/dist/pool/spec.js +59 -0
- package/dist/pool/spec.js.map +1 -0
- package/dist/ref.cjs +88 -0
- package/dist/ref.cjs.map +1 -0
- package/dist/ref.d.ts +0 -1
- package/dist/ref.js +82 -0
- package/dist/ref.js.map +1 -0
- package/dist/utag.d.ts +0 -1
- package/dist/util.cjs +8 -0
- package/dist/util.cjs.map +1 -0
- package/dist/util.d.ts +0 -1
- package/dist/util.js +6 -0
- package/dist/util.js.map +1 -0
- package/dist/value_or_error.cjs +8 -0
- package/dist/value_or_error.cjs.map +1 -0
- package/dist/value_or_error.d.ts +0 -1
- package/dist/value_or_error.js +6 -0
- package/dist/value_or_error.js.map +1 -0
- package/package.json +12 -10
- package/src/drivers/pframe/linker_columns.test.ts +48 -0
- package/src/drivers/pframe/linker_columns.ts +14 -7
- package/src/index.ts +10 -11
- package/dist/author_marker.d.ts.map +0 -1
- package/dist/base32_encode.d.ts.map +0 -1
- package/dist/block_state.d.ts.map +0 -1
- package/dist/bmodel/block_config.d.ts.map +0 -1
- package/dist/bmodel/code.d.ts.map +0 -1
- package/dist/bmodel/container.d.ts.map +0 -1
- package/dist/bmodel/index.d.ts.map +0 -1
- package/dist/bmodel/normalization.d.ts.map +0 -1
- package/dist/bmodel/types.d.ts.map +0 -1
- package/dist/branding.d.ts.map +0 -1
- package/dist/common_types.d.ts.map +0 -1
- package/dist/driver_kit.d.ts.map +0 -1
- package/dist/drivers/blob.d.ts.map +0 -1
- package/dist/drivers/index.d.ts.map +0 -1
- package/dist/drivers/interfaces.d.ts.map +0 -1
- package/dist/drivers/log.d.ts.map +0 -1
- package/dist/drivers/ls.d.ts.map +0 -1
- package/dist/drivers/pframe/column_filter.d.ts.map +0 -1
- package/dist/drivers/pframe/data_info.d.ts.map +0 -1
- package/dist/drivers/pframe/data_types.d.ts.map +0 -1
- package/dist/drivers/pframe/driver.d.ts.map +0 -1
- package/dist/drivers/pframe/find_columns.d.ts.map +0 -1
- package/dist/drivers/pframe/index.d.ts.map +0 -1
- package/dist/drivers/pframe/linker_columns.d.ts.map +0 -1
- package/dist/drivers/pframe/pframe.d.ts.map +0 -1
- package/dist/drivers/pframe/spec/anchored.d.ts.map +0 -1
- package/dist/drivers/pframe/spec/filtered_column.d.ts.map +0 -1
- package/dist/drivers/pframe/spec/ids.d.ts.map +0 -1
- package/dist/drivers/pframe/spec/index.d.ts.map +0 -1
- package/dist/drivers/pframe/spec/native_id.d.ts.map +0 -1
- package/dist/drivers/pframe/spec/selectors.d.ts.map +0 -1
- package/dist/drivers/pframe/spec/spec.d.ts.map +0 -1
- package/dist/drivers/pframe/table.d.ts.map +0 -1
- package/dist/drivers/pframe/table_calculate.d.ts.map +0 -1
- package/dist/drivers/pframe/table_common.d.ts.map +0 -1
- package/dist/drivers/pframe/type_util.d.ts.map +0 -1
- package/dist/drivers/pframe/unique_values.d.ts.map +0 -1
- package/dist/drivers/upload.d.ts.map +0 -1
- package/dist/drivers/urls.d.ts.map +0 -1
- package/dist/errors.d.ts.map +0 -1
- package/dist/flags/block_flags.d.ts.map +0 -1
- package/dist/flags/flag_utils.d.ts.map +0 -1
- package/dist/flags/index.d.ts.map +0 -1
- package/dist/flags/type_utils.d.ts.map +0 -1
- package/dist/index.d.ts.map +0 -1
- package/dist/index.mjs +0 -1582
- package/dist/index.mjs.map +0 -1
- package/dist/json.d.ts.map +0 -1
- package/dist/navigation.d.ts.map +0 -1
- package/dist/plid.d.ts.map +0 -1
- package/dist/pool/entry.d.ts.map +0 -1
- package/dist/pool/index.d.ts.map +0 -1
- package/dist/pool/query.d.ts.map +0 -1
- package/dist/pool/spec.d.ts.map +0 -1
- package/dist/ref.d.ts.map +0 -1
- package/dist/utag.d.ts.map +0 -1
- package/dist/util.d.ts.map +0 -1
- package/dist/value_or_error.d.ts.map +0 -1
package/dist/drivers/pframe/data_info.js
@@ -0,0 +1,266 @@
+import { assertNever } from '../../util.js';
+
+/**
+ * Type guard function that checks if the given value is a valid DataInfo.
+ *
+ * @param value - The value to check
+ * @returns True if the value is a valid DataInfo, false otherwise
+ */
+function isDataInfo(value) {
+    if (!value || typeof value !== 'object') {
+        return false;
+    }
+    const data = value;
+    if (!('type' in data)) {
+        return false;
+    }
+    switch (data.type) {
+        case 'Json':
+            return (typeof data.keyLength === 'number'
+                && data.data !== undefined
+                && typeof data.data === 'object');
+        case 'JsonPartitioned':
+        case 'BinaryPartitioned':
+        case 'ParquetPartitioned':
+            return (typeof data.partitionKeyLength === 'number'
+                && data.parts !== undefined
+                && typeof data.parts === 'object');
+        default:
+            return false;
+    }
+}
+function mapDataInfo(dataInfo, mapFn) {
+    if (dataInfo === undefined) {
+        return undefined;
+    }
+    switch (dataInfo.type) {
+        case 'Json':
+            // Json type doesn't contain blobs, so return as is
+            return dataInfo;
+        case 'JsonPartitioned': {
+            // Map each blob in parts
+            const newParts = {};
+            for (const [key, blob] of Object.entries(dataInfo.parts)) {
+                newParts[key] = mapFn(blob);
+            }
+            return {
+                ...dataInfo,
+                parts: newParts,
+            };
+        }
+        case 'BinaryPartitioned': {
+            // Map each index and values blob in parts
+            const newParts = {};
+            for (const [key, chunk] of Object.entries(dataInfo.parts)) {
+                newParts[key] = {
+                    index: mapFn(chunk.index),
+                    values: mapFn(chunk.values),
+                };
+            }
+            return {
+                ...dataInfo,
+                parts: newParts,
+            };
+        }
+        case 'ParquetPartitioned': {
+            // Map each blob in parts
+            const newParts = {};
+            for (const [key, blob] of Object.entries(dataInfo.parts)) {
+                newParts[key] = mapFn(blob);
+            }
+            return {
+                ...dataInfo,
+                parts: newParts,
+            };
+        }
+    }
+}
+/**
+ * @param dataInfo - The source DataInfo object
+ * @param cb - Callback, function that have access to every blob to visit them all
+ * @returns Nothing
+ */
+function visitDataInfo(dataInfo, cb) {
+    switch (dataInfo.type) {
+        case 'Json':
+            // Json type doesn't contain blobs, so return as is
+            break;
+        case 'JsonPartitioned': {
+            // Visit each blob in parts
+            Object.values(dataInfo.parts).forEach(cb);
+            break;
+        }
+        case 'BinaryPartitioned': {
+            // Visit each index and values blob in parts
+            Object.values(dataInfo.parts).forEach((chunk) => {
+                cb(chunk.index);
+                cb(chunk.values);
+            });
+            break;
+        }
+        case 'ParquetPartitioned': {
+            // Visit each blob in parts
+            Object.values(dataInfo.parts).forEach(cb);
+            break;
+        }
+    }
+}
+/**
+ * Type guard function that checks if the given value is a valid DataInfoEntries.
+ *
+ * @param value - The value to check
+ * @returns True if the value is a valid DataInfoEntries, false otherwise
+ */
+function isDataInfoEntries(value) {
+    if (!value || typeof value !== 'object') {
+        return false;
+    }
+    const data = value;
+    if (!('type' in data)) {
+        return false;
+    }
+    switch (data.type) {
+        case 'Json':
+            return (typeof data.keyLength === 'number'
+                && Array.isArray(data.data));
+        case 'JsonPartitioned':
+        case 'BinaryPartitioned':
+        case 'ParquetPartitioned':
+            return (typeof data.partitionKeyLength === 'number'
+                && Array.isArray(data.parts));
+        default:
+            return false;
+    }
+}
+/**
+ * Type guard function that checks if the given value is a valid PartitionedDataInfoEntries.
+ *
+ * @template Blob - Type parameter representing the storage reference type
+ * @param value - The value to check
+ * @returns True if the value is a valid PartitionedDataInfoEntries, false otherwise
+ */
+function isPartitionedDataInfoEntries(value) {
+    if (!isDataInfoEntries(value))
+        return false;
+    switch (value.type) {
+        case 'JsonPartitioned':
+        case 'BinaryPartitioned':
+        case 'ParquetPartitioned':
+            return true;
+        default:
+            return false;
+    }
+}
+/**
+ * Converts DataInfo to DataInfoEntries
+ *
+ * @param dataInfo - The record-based DataInfo object
+ * @returns The equivalent entry-based DataInfoEntries object
+ */
+function dataInfoToEntries(dataInfo) {
+    switch (dataInfo.type) {
+        case 'Json': return {
+            type: 'Json',
+            keyLength: dataInfo.keyLength,
+            data: Object.entries(dataInfo.data).map(([keyStr, value]) => {
+                const key = JSON.parse(keyStr);
+                return { key, value };
+            }),
+        };
+        case 'JsonPartitioned': return {
+            type: 'JsonPartitioned',
+            partitionKeyLength: dataInfo.partitionKeyLength,
+            parts: Object.entries(dataInfo.parts).map(([keyStr, blob]) => {
+                const key = JSON.parse(keyStr);
+                return { key, value: blob };
+            }),
+        };
+        case 'BinaryPartitioned': return {
+            type: 'BinaryPartitioned',
+            partitionKeyLength: dataInfo.partitionKeyLength,
+            parts: Object.entries(dataInfo.parts).map(([keyStr, chunk]) => {
+                const key = JSON.parse(keyStr);
+                return { key, value: chunk };
+            }),
+        };
+        case 'ParquetPartitioned': return {
+            type: 'ParquetPartitioned',
+            partitionKeyLength: dataInfo.partitionKeyLength,
+            parts: Object.entries(dataInfo.parts).map(([keyStr, blob]) => {
+                const key = JSON.parse(keyStr);
+                return { key, value: blob };
+            }),
+        };
+        default:
+            assertNever(dataInfo);
+    }
+}
+/**
+ * Converts DataInfoEntries to DataInfo
+ *
+ * @param dataInfoEntries - The entry-based DataInfoEntries object
+ * @returns The equivalent record-based DataInfo object
+ */
+function entriesToDataInfo(dataInfoEntries) {
+    switch (dataInfoEntries.type) {
+        case 'Json': return {
+            type: 'Json',
+            keyLength: dataInfoEntries.keyLength,
+            data: Object.fromEntries(dataInfoEntries.data.map(({ key, value }) => [JSON.stringify(key), value])),
+        };
+        case 'JsonPartitioned': return {
+            type: 'JsonPartitioned',
+            partitionKeyLength: dataInfoEntries.partitionKeyLength,
+            parts: Object.fromEntries(dataInfoEntries.parts.map(({ key, value }) => [JSON.stringify(key), value])),
+        };
+        case 'BinaryPartitioned': return {
+            type: 'BinaryPartitioned',
+            partitionKeyLength: dataInfoEntries.partitionKeyLength,
+            parts: Object.fromEntries(dataInfoEntries.parts.map(({ key, value }) => [JSON.stringify(key), value])),
+        };
+        case 'ParquetPartitioned': return {
+            type: 'ParquetPartitioned',
+            partitionKeyLength: dataInfoEntries.partitionKeyLength,
+            parts: Object.fromEntries(dataInfoEntries.parts.map(({ key, value }) => [JSON.stringify(key), value])),
+        };
+        default:
+            assertNever(dataInfoEntries);
+    }
+}
+function mapDataInfoEntries(dataInfoEntries, mapFn) {
+    if (dataInfoEntries === undefined) {
+        return undefined;
+    }
+    switch (dataInfoEntries.type) {
+        case 'Json':
+            // Json type doesn't contain blobs, so return as is
+            return dataInfoEntries;
+        case 'JsonPartitioned': return {
+            ...dataInfoEntries,
+            parts: dataInfoEntries.parts.map((entry) => ({
+                key: entry.key,
+                value: mapFn(entry.value),
+            })),
+        };
+        case 'BinaryPartitioned': return {
+            ...dataInfoEntries,
+            parts: dataInfoEntries.parts.map((entry) => ({
+                key: entry.key,
+                value: {
+                    index: mapFn(entry.value.index),
+                    values: mapFn(entry.value.values),
+                },
+            })),
+        };
+        case 'ParquetPartitioned': return {
+            ...dataInfoEntries,
+            parts: dataInfoEntries.parts.map((entry) => ({
+                key: entry.key,
+                value: mapFn(entry.value),
+            })),
+        };
+    }
+}
+
+export { dataInfoToEntries, entriesToDataInfo, isDataInfo, isDataInfoEntries, isPartitionedDataInfoEntries, mapDataInfo, mapDataInfoEntries, visitDataInfo };
+//# sourceMappingURL=data_info.js.map
package/dist/drivers/pframe/data_info.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"data_info.js","sources":["../../../src/drivers/pframe/data_info.ts"], …}
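
For orientation, a minimal sketch of how the data_info helpers above fit together, assuming they (and the DataInfo type) are re-exported from the package root and using a hypothetical string blob id; this is illustrative, not code from the package:

import {
  dataInfoToEntries,
  entriesToDataInfo,
  mapDataInfo,
  type DataInfo,
} from '@milaboratories/pl-model-common';

// Record-based DataInfo: part keys are JSON-stringified partition key tuples.
const info: DataInfo<string> = {
  type: 'JsonPartitioned',
  partitionKeyLength: 1,
  parts: { '["sampleA"]': 'blob-1', '["sampleB"]': 'blob-2' },
};

// Entry-based form: each part becomes { key: ['sampleA'], value: 'blob-1' } with a parsed key tuple.
const entries = dataInfoToEntries(info);

// Converting back restores the original record-based shape.
const restored = entriesToDataInfo(entries);

// mapDataInfo rewrites only the blob references; keys and partitionKeyLength are preserved.
const remapped = mapDataInfo(restored, (blobId) => ({ id: blobId }));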
package/dist/drivers/pframe/data_types.cjs
@@ -0,0 +1,91 @@
+'use strict';
+
+var spec = require('./spec/spec.cjs');
+
+function isBitSet(bitVector, offset) {
+    const chunkIndex = Math.floor(offset / 8);
+    const mask = 1 << (7 - (offset % 8));
+    return (bitVector[chunkIndex] & mask) > 0;
+}
+function isValueAbsent(vector, row) {
+    return isBitSet(vector.absent, row);
+}
+function isValueNA(vector, row) {
+    if (vector.isNA)
+        return isBitSet(vector.isNA, row);
+    // Check for legacy magic values to support old desktop versions
+    const valueType = vector.type;
+    const value = vector.data[row];
+    switch (valueType) {
+        case spec.ValueType.Int:
+            return value === -2147483648;
+        case spec.ValueType.Long:
+            return value === -9007199254740991n;
+        case spec.ValueType.Float:
+            return Number.isNaN(value);
+        case spec.ValueType.Double:
+            return Number.isNaN(value);
+        case spec.ValueType.String:
+            return value === null;
+        case spec.ValueType.Bytes:
+            return value === null;
+        default:
+            throw Error(`unsupported data type: ${valueType}`);
+    }
+}
+const PTableAbsent = { type: 'absent' };
+/** Type guard for absent value */
+function isPTableAbsent(value) {
+    return typeof value === 'object' && value !== null && 'type' in value && value.type === 'absent';
+}
+const PTableNA = null;
+/** Type guard for NA value */
+function isPTableNA(value) {
+    return value === PTableNA;
+}
+function isPTableValueAxis(value, isNA) {
+    return !(isNA ? isNA(value) : isPTableNA(value));
+}
+function pTableValueImpl(column, row, options) {
+    const valueType = column.type;
+    if (valueType === spec.ValueType.Bytes) {
+        throw Error('Bytes not yet supported');
+    }
+    if (options && 'dataType' in options && options.dataType !== undefined && options.dataType !== valueType) {
+        throw Error(`expected column of type ${options.dataType}, got ${valueType}`);
+    }
+    if (isValueAbsent(column, row)) {
+        return options?.absent !== undefined ? options.absent : PTableAbsent;
+    }
+    if (isValueNA(column, row)) {
+        return options?.na !== undefined ? options.na : PTableNA;
+    }
+    const value = column.data[row];
+    switch (valueType) {
+        case spec.ValueType.Int:
+            return value;
+        case spec.ValueType.Long:
+            return Number(value);
+        case spec.ValueType.Float:
+            return value;
+        case spec.ValueType.Double:
+            return value;
+        case spec.ValueType.String:
+            return value;
+    }
+}
+function pTableValue(column, row, options) {
+    return pTableValueImpl(column, row, options);
+}
+function pTableValueBranded(column, row, options) {
+    return pTableValueImpl(column, row, options);
+}
+
+exports.PTableAbsent = PTableAbsent;
+exports.PTableNA = PTableNA;
+exports.isPTableAbsent = isPTableAbsent;
+exports.isPTableNA = isPTableNA;
+exports.isPTableValueAxis = isPTableValueAxis;
+exports.pTableValue = pTableValue;
+exports.pTableValueBranded = pTableValueBranded;
+//# sourceMappingURL=data_types.cjs.map
package/dist/drivers/pframe/data_types.cjs.map
@@ -0,0 +1 @@
+{"version":3,"file":"data_types.cjs","sources":["../../../src/drivers/pframe/data_types.ts"], …}
,CAAC,MAAM,GAAG,YAAY;IACtE;AAEA,IAAA,IAAI,SAAS,CAAC,MAAM,EAAE,GAAG,CAAC,EAAE;AAC1B,QAAA,OAAO,OAAO,EAAE,EAAE,KAAK,SAAS,GAAG,OAAO,CAAC,EAAE,GAAG,QAAQ;IAC1D;IAEA,MAAM,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,GAAG,CAAE;IAC/B,QAAQ,SAAS;QACf,KAAKA,cAAS,CAAC,GAAG;AAChB,YAAA,OAAO,KAA+B;QACxC,KAAKA,cAAS,CAAC,IAAI;AACjB,YAAA,OAAO,MAAM,CAAC,KAAgC,CAAC;QACjD,KAAKA,cAAS,CAAC,KAAK;AAClB,YAAA,OAAO,KAAiC;QAC1C,KAAKA,cAAS,CAAC,MAAM;AACnB,YAAA,OAAO,KAAkC;QAC3C,KAAKA,cAAS,CAAC,MAAM;AACnB,YAAA,OAAQ,KAAoC;;AAElD;SAsDgB,WAAW,CAKzB,MAAoC,EACpC,GAAW,EACX,OAIC,EAAA;IAED,OAAO,eAAe,CAAC,MAAM,EAAE,GAAG,EAAE,OAAO,CAAC;AAC9C;SAqDgB,kBAAkB,CAKhC,MAAoC,EACpC,GAAW,EACX,OAIC,EAAA;IAED,OAAO,eAAe,CAAC,MAAM,EAAE,GAAG,EAAE,OAAO,CAAC;AAC9C;;;;;;;;;;"}
@@ -0,0 +1,83 @@
+import { ValueType } from './spec/spec.js';
+
+function isBitSet(bitVector, offset) {
+    const chunkIndex = Math.floor(offset / 8);
+    const mask = 1 << (7 - (offset % 8));
+    return (bitVector[chunkIndex] & mask) > 0;
+}
+function isValueAbsent(vector, row) {
+    return isBitSet(vector.absent, row);
+}
+function isValueNA(vector, row) {
+    if (vector.isNA)
+        return isBitSet(vector.isNA, row);
+    // Check for legacy magic values to support old desktop versions
+    const valueType = vector.type;
+    const value = vector.data[row];
+    switch (valueType) {
+        case ValueType.Int:
+            return value === -2147483648;
+        case ValueType.Long:
+            return value === -9007199254740991n;
+        case ValueType.Float:
+            return Number.isNaN(value);
+        case ValueType.Double:
+            return Number.isNaN(value);
+        case ValueType.String:
+            return value === null;
+        case ValueType.Bytes:
+            return value === null;
+        default:
+            throw Error(`unsupported data type: ${valueType}`);
+    }
+}
+const PTableAbsent = { type: 'absent' };
+/** Type guard for absent value */
+function isPTableAbsent(value) {
+    return typeof value === 'object' && value !== null && 'type' in value && value.type === 'absent';
+}
+const PTableNA = null;
+/** Type guard for NA value */
+function isPTableNA(value) {
+    return value === PTableNA;
+}
+function isPTableValueAxis(value, isNA) {
+    return !(isNA ? isNA(value) : isPTableNA(value));
+}
+function pTableValueImpl(column, row, options) {
+    const valueType = column.type;
+    if (valueType === ValueType.Bytes) {
+        throw Error('Bytes not yet supported');
+    }
+    if (options && 'dataType' in options && options.dataType !== undefined && options.dataType !== valueType) {
+        throw Error(`expected column of type ${options.dataType}, got ${valueType}`);
+    }
+    if (isValueAbsent(column, row)) {
+        return options?.absent !== undefined ? options.absent : PTableAbsent;
+    }
+    if (isValueNA(column, row)) {
+        return options?.na !== undefined ? options.na : PTableNA;
+    }
+    const value = column.data[row];
+    switch (valueType) {
+        case ValueType.Int:
+            return value;
+        case ValueType.Long:
+            return Number(value);
+        case ValueType.Float:
+            return value;
+        case ValueType.Double:
+            return value;
+        case ValueType.String:
+            return value;
+    }
+}
+function pTableValue(column, row, options) {
+    return pTableValueImpl(column, row, options);
+}
+function pTableValueBranded(column, row, options) {
+    return pTableValueImpl(column, row, options);
+}
+
+export { PTableAbsent, PTableNA, isPTableAbsent, isPTableNA, isPTableValueAxis, pTableValue, pTableValueBranded };
+//# sourceMappingURL=data_types.js.map
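For orientation, here is a minimal consumer-side sketch of the `pTableValue` helper and the absent/NA type guards added above; it is not part of the package diff. The column literal is hypothetical, and the root import assumes these symbols are re-exported from the package entry point (they are defined in `dist/drivers/pframe/data_types.js`). The bit layout follows `isBitSet`: row `r` maps to bit `1 << (7 - r % 8)` of byte `r >> 3`.

```ts
import {
  ValueType,
  pTableValue,
  isPTableAbsent,
  isPTableNA,
  isPTableValueAxis,
  type PTableVectorTyped,
} from '@milaboratories/pl-model-common'; // assumed re-export of drivers/pframe/data_types

// Hypothetical 3-row Int column: row 0 holds 42, row 1 is NA, row 2 is absent.
const column: PTableVectorTyped<typeof ValueType.Int> = {
  type: ValueType.Int,
  data: new Int32Array([42, 0, 0]),
  isNA: new Uint8Array([0b01000000]),   // bit for row 1 set → NA
  absent: new Uint8Array([0b00100000]), // bit for row 2 set → absent
};

const v0 = pTableValue(column, 0); // 42
const v1 = pTableValue(column, 1); // PTableNA (null)
const v2 = pTableValue(column, 2); // PTableAbsent ({ type: 'absent' })

isPTableNA(v1);        // true
isPTableAbsent(v2);    // true
isPTableValueAxis(v0); // true — axis cells may be absent or data, but never NA

// Per-call fill values replace the default sentinels:
pTableValue(column, 1, { na: NaN });           // NaN
pTableValue(column, 2, { absent: 'missing' }); // 'missing'
```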
@@ -0,0 +1 @@
{"version":3,"file":"data_types.js","sources":["../../../src/drivers/pframe/data_types.ts"],"sourcesContent":["import type { Branded } from '../../branding';\nimport { ValueType } from './spec/spec';\n\nexport type PVectorDataInt = Int32Array;\nexport type PVectorDataLong = BigInt64Array;\nexport type PVectorDataFloat = Float32Array;\nexport type PVectorDataDouble = Float64Array;\nexport type PVectorDataString = (null | string)[];\nexport type PVectorDataBytes = (null | Uint8Array)[];\nexport type PVectorDataTyped<DataType extends ValueType> =\n DataType extends typeof ValueType.Int ? PVectorDataInt :\n DataType extends typeof ValueType.Long ? PVectorDataLong :\n DataType extends typeof ValueType.Float ? PVectorDataFloat :\n DataType extends typeof ValueType.Double ? PVectorDataDouble :\n DataType extends typeof ValueType.String ? PVectorDataString :\n DataType extends typeof ValueType.Bytes ? PVectorDataBytes :\n never;\nexport type PVectorData = PVectorDataTyped<ValueType>;\n\nexport type PTableVectorTyped<DataType extends ValueType> = {\n /** Stored data type */\n readonly type: DataType;\n\n /** Values for present positions, absent positions have NA values */\n readonly data: PVectorDataTyped<DataType>;\n\n /**\n * Encoded bit array marking some elements of this vector as NA,\n * call {@link bitSet} to read the data.\n * In old desktop versions NA values are encoded as magic values in data array.\n * */\n readonly isNA?: Uint8Array;\n\n /**\n * Encoded bit array marking some elements of this vector as absent,\n * call {@link bitSet} to read the data.\n * */\n readonly absent: Uint8Array;\n};\n/** Table column data in comparison to the data stored in a separate PColumn\n * may have some of the values \"absent\", i.e. as a result of missing record in\n * outer join operation. This information is encoded in {@link absent} field. 
*/\nexport type PTableVector = PTableVectorTyped<ValueType>;\n\nfunction isBitSet(bitVector: Uint8Array, offset: number): boolean {\n const chunkIndex = Math.floor(offset / 8);\n const mask = 1 << (7 - (offset % 8));\n return (bitVector[chunkIndex] & mask) > 0;\n}\n\nfunction isValueAbsent(vector: PTableVector, row: number): boolean {\n return isBitSet(vector.absent, row);\n}\n\nfunction isValueNA(vector: PTableVector, row: number): boolean {\n if (vector.isNA) return isBitSet(vector.isNA, row);\n\n // Check for legacy magic values to support old desktop versions\n const valueType = vector.type;\n const value = vector.data[row];\n switch (valueType) {\n case ValueType.Int:\n return (value as PVectorDataInt[number]) === -2147483648;\n case ValueType.Long:\n return (value as PVectorDataLong[number]) === -9007199254740991n;\n case ValueType.Float:\n return Number.isNaN((value as PVectorDataFloat[number]));\n case ValueType.Double:\n return Number.isNaN((value as PVectorDataDouble[number]));\n case ValueType.String:\n return (value as PVectorDataString[number]) === null;\n case ValueType.Bytes:\n return (value as PVectorDataBytes[number]) === null;\n default:\n throw Error(`unsupported data type: ${valueType satisfies never}`);\n }\n}\n\nexport const PTableAbsent = { type: 'absent' } as const;\nexport type PTableAbsent = typeof PTableAbsent;\n\n/** Type guard for absent value */\nexport function isPTableAbsent(value: unknown): value is PTableAbsent {\n return typeof value === 'object' && value !== null && 'type' in value && value.type === 'absent';\n}\n\nexport const PTableNA = null;\nexport type PTableNA = typeof PTableNA;\n\n/** Type guard for NA value */\nexport function isPTableNA(value: unknown): value is PTableNA {\n return value === PTableNA;\n}\n\nexport type ValueTypeSupported = Exclude<ValueType, typeof ValueType.Bytes>;\n\nexport type PTableValueInt = number;\nexport type PTableValueLong = number;\nexport type PTableValueFloat = number;\nexport type PTableValueDouble = number;\nexport type PTableValueString = string;\nexport type PTableValueData<DataType extends ValueTypeSupported> =\n DataType extends typeof ValueType.Int ? PTableValueInt :\n DataType extends typeof ValueType.Long ? PTableValueLong :\n DataType extends typeof ValueType.Float ? PTableValueFloat :\n DataType extends typeof ValueType.Double ? PTableValueDouble :\n DataType extends typeof ValueType.String ? 
PTableValueString :\n never;\nexport type PTableValueDataBranded<DataType extends ValueTypeSupported> = Branded<PTableValueData<DataType>, DataType>;\nexport type PTableValue<\n Absent = PTableAbsent,\n NA = PTableNA,\n DataType extends ValueTypeSupported = ValueTypeSupported,\n> = Absent | NA | PTableValueData<DataType>;\nexport type PTableValueBranded<\n Absent = PTableAbsent,\n NA = PTableNA,\n DataType extends ValueTypeSupported = ValueTypeSupported,\n> = Absent | NA | PTableValueDataBranded<DataType>;\n\nexport type PTableValueAxis<\n Absent = PTableAbsent,\n DataType extends ValueTypeSupported = ValueTypeSupported,\n> = PTableValue<Absent, never, DataType>;\n\nexport function isPTableValueAxis<Absent, NA, DataType extends ValueTypeSupported>(\n value: PTableValue<Absent, NA, DataType>,\n isNA: (value: PTableValue<Absent, NA, DataType>) => value is NA,\n): value is PTableValueAxis<Absent, DataType>;\nexport function isPTableValueAxis<Absent, DataType extends ValueTypeSupported>(\n value: PTableValue<Absent, PTableNA, DataType>,\n): value is PTableValueAxis<Absent, DataType>;\nexport function isPTableValueAxis<\n Absent = PTableAbsent,\n NA = PTableNA,\n DataType extends ValueTypeSupported = ValueTypeSupported,\n>(\n value: PTableValue<Absent, NA, DataType>,\n isNA?: (value: PTableValue<Absent, NA, DataType>) => value is NA,\n): value is PTableValueAxis<Absent, DataType> {\n return !(isNA ? isNA(value) : isPTableNA(value));\n}\n\nfunction pTableValueImpl<\n FillAbsent = PTableAbsent,\n FillNA = PTableNA,\n DataType extends ValueType = ValueTypeSupported,\n>(\n column: PTableVectorTyped<ValueType>,\n row: number,\n options?: {\n absent?: FillAbsent;\n na?: FillNA;\n dataType?: DataType;\n },\n) {\n const valueType = column.type;\n if (valueType === ValueType.Bytes) {\n throw Error('Bytes not yet supported');\n }\n\n if (options && 'dataType' in options && options.dataType !== undefined && options.dataType !== valueType) {\n throw Error(`expected column of type ${options.dataType}, got ${valueType}`);\n }\n\n if (isValueAbsent(column, row)) {\n return options?.absent !== undefined ? options.absent : PTableAbsent;\n }\n\n if (isValueNA(column, row)) {\n return options?.na !== undefined ? options.na : PTableNA;\n }\n\n const value = column.data[row]!;\n switch (valueType) {\n case ValueType.Int:\n return value as PVectorDataInt[number];\n case ValueType.Long:\n return Number(value as PVectorDataLong[number]);\n case ValueType.Float:\n return value as PVectorDataFloat[number];\n case ValueType.Double:\n return value as PVectorDataDouble[number];\n case ValueType.String:\n return (value as PVectorDataString[number])!;\n }\n}\n\n/** Read PTableValue from PTable column at specified row */\nexport function pTableValue<DataType extends ValueType>(\n column: PTableVectorTyped<DataType>,\n row: number,\n): DataType extends ValueTypeSupported ? PTableValue<PTableAbsent, PTableNA, DataType> : never;\nexport function pTableValue<FillAbsent, DataType extends ValueType>(\n column: PTableVectorTyped<DataType>,\n row: number,\n options: {\n absent: FillAbsent;\n }\n): DataType extends ValueTypeSupported ? PTableValue<FillAbsent, PTableNA, DataType> : never;\nexport function pTableValue<FillNA, DataType extends ValueType>(\n column: PTableVectorTyped<DataType>,\n row: number,\n options: {\n na: FillNA;\n }\n): DataType extends ValueTypeSupported ? 
PTableValue<PTableAbsent, FillNA, DataType> : never;\nexport function pTableValue<FillNA, FillAbsent, DataType extends ValueType>(\n column: PTableVectorTyped<DataType>,\n row: number,\n options: {\n absent: FillAbsent;\n na: FillNA;\n }\n): DataType extends ValueTypeSupported ? PTableValue<FillAbsent, FillNA, DataType> : never;\nexport function pTableValue<FillAbsent, DataType extends ValueTypeSupported>(\n column: PTableVectorTyped<ValueType>,\n row: number,\n options: {\n absent: FillAbsent;\n dataType: DataType;\n }\n): PTableValue<FillAbsent, PTableNA>;\nexport function pTableValue<FillNA, DataType extends ValueTypeSupported>(\n column: PTableVectorTyped<ValueType>,\n row: number,\n options: {\n na: FillNA;\n dataType: DataType;\n }\n): PTableValue<PTableAbsent, FillNA, DataType>;\nexport function pTableValue<FillNA, FillAbsent, DataType extends ValueTypeSupported>(\n column: PTableVectorTyped<ValueType>,\n row: number,\n options: {\n absent: FillAbsent;\n na: FillNA;\n dataType: DataType;\n }\n): PTableValue<FillAbsent, FillNA, DataType>;\nexport function pTableValue<\n FillAbsent = PTableAbsent,\n FillNA = PTableNA,\n DataType extends ValueType = ValueTypeSupported,\n>(\n column: PTableVectorTyped<ValueType>,\n row: number,\n options?: {\n absent?: FillAbsent;\n na?: FillNA;\n dataType?: DataType;\n },\n) {\n return pTableValueImpl(column, row, options);\n}\n\nexport function pTableValueBranded<DataType extends ValueType>(\n column: PTableVectorTyped<DataType>,\n row: number,\n): DataType extends ValueTypeSupported ? PTableValueBranded<PTableAbsent, PTableNA, DataType> : never;\nexport function pTableValueBranded<FillAbsent, DataType extends ValueType>(\n column: PTableVectorTyped<DataType>,\n row: number,\n options: {\n absent: FillAbsent;\n }\n): DataType extends ValueTypeSupported ? PTableValueBranded<FillAbsent, PTableNA, DataType> : never;\nexport function pTableValueBranded<FillNA, DataType extends ValueType>(\n column: PTableVectorTyped<DataType>,\n row: number,\n options: {\n na: FillNA;\n }\n): DataType extends ValueTypeSupported ? PTableValueBranded<PTableAbsent, FillNA, DataType> : never;\nexport function pTableValueBranded<FillNA, FillAbsent, DataType extends ValueType>(\n column: PTableVectorTyped<DataType>,\n row: number,\n options: {\n absent: FillAbsent;\n na: FillNA;\n }\n): DataType extends ValueTypeSupported ? 
PTableValueBranded<FillAbsent, FillNA, DataType> : never;\nexport function pTableValueBranded<FillAbsent, DataType extends ValueTypeSupported>(\n column: PTableVectorTyped<ValueType>,\n row: number,\n options: {\n absent: FillAbsent;\n dataType: DataType;\n }\n): PTableValueBranded<FillAbsent, PTableNA>;\nexport function pTableValueBranded<FillNA, DataType extends ValueTypeSupported>(\n column: PTableVectorTyped<ValueType>,\n row: number,\n options: {\n na: FillNA;\n dataType: DataType;\n }\n): PTableValueBranded<PTableAbsent, FillNA, DataType>;\nexport function pTableValueBranded<FillNA, FillAbsent, DataType extends ValueTypeSupported>(\n column: PTableVectorTyped<ValueType>,\n row: number,\n options: {\n absent: FillAbsent;\n na: FillNA;\n dataType: DataType;\n }\n): PTableValueBranded<FillAbsent, FillNA, DataType>;\nexport function pTableValueBranded<\n FillAbsent = PTableAbsent,\n FillNA = PTableNA,\n DataType extends ValueType = ValueTypeSupported,\n>(\n column: PTableVectorTyped<ValueType>,\n row: number,\n options?: {\n absent?: FillAbsent;\n na?: FillNA;\n dataType?: DataType;\n },\n) {\n return pTableValueImpl(column, row, options);\n}\n\n/** Used in requests to partially retrieve table's data */\nexport type TableRange = {\n /** Index of the first record to retrieve */\n readonly offset: number;\n\n /** Block length */\n readonly length: number;\n};\n\n/** Unified information about table shape */\nexport type PTableShape = {\n /** Number of unified table columns, including all axes and PColumn values */\n columns: number;\n\n /** Number of rows */\n rows: number;\n};\n"],"names":[],"mappings":";;AA4CA,SAAS,QAAQ,CAAC,SAAqB,EAAE,MAAc,EAAA;IACrD,MAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC;AACzC,IAAA,MAAM,IAAI,GAAG,CAAC,KAAK,CAAC,IAAI,MAAM,GAAG,CAAC,CAAC,CAAC;IACpC,OAAO,CAAC,SAAS,CAAC,UAAU,CAAC,GAAG,IAAI,IAAI,CAAC;AAC3C;AAEA,SAAS,aAAa,CAAC,MAAoB,EAAE,GAAW,EAAA;IACtD,OAAO,QAAQ,CAAC,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC;AACrC;AAEA,SAAS,SAAS,CAAC,MAAoB,EAAE,GAAW,EAAA;IAClD,IAAI,MAAM,CAAC,IAAI;QAAE,OAAO,QAAQ,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC;;AAGlD,IAAA,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI;IAC7B,MAAM,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC;IAC9B,QAAQ,SAAS;QACf,KAAK,SAAS,CAAC,GAAG;AAChB,YAAA,OAAQ,KAAgC,KAAK,WAAW;QAC1D,KAAK,SAAS,CAAC,IAAI;AACjB,YAAA,OAAQ,KAAiC,KAAK,CAAC,iBAAiB;QAClE,KAAK,SAAS,CAAC,KAAK;AAClB,YAAA,OAAO,MAAM,CAAC,KAAK,CAAE,KAAkC,CAAC;QAC1D,KAAK,SAAS,CAAC,MAAM;AACnB,YAAA,OAAO,MAAM,CAAC,KAAK,CAAE,KAAmC,CAAC;QAC3D,KAAK,SAAS,CAAC,MAAM;YACnB,OAAQ,KAAmC,KAAK,IAAI;QACtD,KAAK,SAAS,CAAC,KAAK;YAClB,OAAQ,KAAkC,KAAK,IAAI;AACrD,QAAA;AACE,YAAA,MAAM,KAAK,CAAC,CAAA,uBAAA,EAA0B,SAAyB,CAAA,CAAE,CAAC;;AAExE;MAEa,YAAY,GAAG,EAAE,IAAI,EAAE,QAAQ;AAG5C;AACM,SAAU,cAAc,CAAC,KAAc,EAAA;AAC3C,IAAA,OAAO,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,KAAK,IAAI,IAAI,MAAM,IAAI,KAAK,IAAI,KAAK,CAAC,IAAI,KAAK,QAAQ;AAClG;AAEO,MAAM,QAAQ,GAAG;AAGxB;AACM,SAAU,UAAU,CAAC,KAAc,EAAA;IACvC,OAAO,KAAK,KAAK,QAAQ;AAC3B;AAwCM,SAAU,iBAAiB,CAK/B,KAAwC,EACxC,IAAgE,EAAA;AAEhE,IAAA,OAAO,EAAE,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC;AAClD;AAEA,SAAS,eAAe,CAKtB,MAAoC,EACpC,GAAW,EACX,OAIC,EAAA;AAED,IAAA,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI;AAC7B,IAAA,IAAI,SAAS,KAAK,SAAS,CAAC,KAAK,EAAE;AACjC,QAAA,MAAM,KAAK,CAAC,yBAAyB,CAAC;IACxC;AAEA,IAAA,IAAI,OAAO,IAAI,UAAU,IAAI,OAAO,IAAI,OAAO,CAAC,QAAQ,KAAK,SAAS,IAAI,OAAO,CAAC,QAAQ,KAAK,SAAS,EAAE;QACxG,MAAM,KAAK,CAAC,CAAA,wBAAA,EAA2B,OAAO,CAAC,QAAQ,CAAA,MAAA,EAAS,SAAS,CAAA,CAAE,CAAC;IAC9E;AAEA,IAAA,IAAI,aAAa,CAAC,MAAM,EAAE,GAAG,CAAC,EAAE;AAC9B,QAAA,OAAO,OAAO,EAAE,MAAM,KAAK,SAAS,GAAG,OAAO,CAAC,MAAM,GAAG,YAAY
;IACtE;AAEA,IAAA,IAAI,SAAS,CAAC,MAAM,EAAE,GAAG,CAAC,EAAE;AAC1B,QAAA,OAAO,OAAO,EAAE,EAAE,KAAK,SAAS,GAAG,OAAO,CAAC,EAAE,GAAG,QAAQ;IAC1D;IAEA,MAAM,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,GAAG,CAAE;IAC/B,QAAQ,SAAS;QACf,KAAK,SAAS,CAAC,GAAG;AAChB,YAAA,OAAO,KAA+B;QACxC,KAAK,SAAS,CAAC,IAAI;AACjB,YAAA,OAAO,MAAM,CAAC,KAAgC,CAAC;QACjD,KAAK,SAAS,CAAC,KAAK;AAClB,YAAA,OAAO,KAAiC;QAC1C,KAAK,SAAS,CAAC,MAAM;AACnB,YAAA,OAAO,KAAkC;QAC3C,KAAK,SAAS,CAAC,MAAM;AACnB,YAAA,OAAQ,KAAoC;;AAElD;SAsDgB,WAAW,CAKzB,MAAoC,EACpC,GAAW,EACX,OAIC,EAAA;IAED,OAAO,eAAe,CAAC,MAAM,EAAE,GAAG,EAAE,OAAO,CAAC;AAC9C;SAqDgB,kBAAkB,CAKhC,MAAoC,EACpC,GAAW,EACX,OAIC,EAAA;IAED,OAAO,eAAe,CAAC,MAAM,EAAE,GAAG,EAAE,OAAO,CAAC;AAC9C;;;;"}
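The `dataType` overloads add a runtime guard on top of the same read path: if the column's stored type differs from the requested one, the call throws, and `pTableValueBranded` returns the same runtime value typed with the `Branded` wrapper so values read from differently typed columns do not mix at compile time. Below is a hedged sketch under the same import assumption as above; the Long column is hypothetical. Note that Long cells are converted with `Number(value)`, so magnitudes beyond `Number.MAX_SAFE_INTEGER` lose precision.

```ts
import {
  ValueType,
  pTableValue,
  pTableValueBranded,
  type PTableVectorTyped,
} from '@milaboratories/pl-model-common'; // assumed re-export of drivers/pframe/data_types

// Hypothetical 2-row Long column with no NA/absent rows.
const longColumn: PTableVectorTyped<typeof ValueType.Long> = {
  type: ValueType.Long,
  data: new BigInt64Array([1n, 2n]),
  absent: new Uint8Array([0]),
};

// dataType asserts the stored type at runtime; Long values come back as number.
const a = pTableValue(longColumn, 0, { na: null, dataType: ValueType.Long });        // 1
const b = pTableValueBranded(longColumn, 1, { na: null, dataType: ValueType.Long }); // 2, branded as Long

// A mismatched dataType throws ("expected column of type ..., got ..."):
// pTableValue(longColumn, 0, { na: null, dataType: ValueType.Int });
```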