xnd 0.2.0dev6 → 0.2.0dev7

Sign up to get free protection for your applications and to get access to all the features.
Files changed (74) hide show
  1. checksums.yaml +4 -4
  2. data/README.md +2 -0
  3. data/Rakefile +1 -1
  4. data/ext/ruby_xnd/GPATH +0 -0
  5. data/ext/ruby_xnd/GRTAGS +0 -0
  6. data/ext/ruby_xnd/GTAGS +0 -0
  7. data/ext/ruby_xnd/extconf.rb +8 -5
  8. data/ext/ruby_xnd/gc_guard.c +53 -2
  9. data/ext/ruby_xnd/gc_guard.h +8 -2
  10. data/ext/ruby_xnd/include/overflow.h +147 -0
  11. data/ext/ruby_xnd/include/ruby_xnd.h +62 -0
  12. data/ext/ruby_xnd/include/xnd.h +590 -0
  13. data/ext/ruby_xnd/lib/libxnd.a +0 -0
  14. data/ext/ruby_xnd/lib/libxnd.so +1 -0
  15. data/ext/ruby_xnd/lib/libxnd.so.0 +1 -0
  16. data/ext/ruby_xnd/lib/libxnd.so.0.2.0dev3 +0 -0
  17. data/ext/ruby_xnd/ruby_xnd.c +556 -47
  18. data/ext/ruby_xnd/ruby_xnd.h +2 -1
  19. data/ext/ruby_xnd/xnd/Makefile +80 -0
  20. data/ext/ruby_xnd/xnd/config.h +26 -0
  21. data/ext/ruby_xnd/xnd/config.h.in +3 -0
  22. data/ext/ruby_xnd/xnd/config.log +421 -0
  23. data/ext/ruby_xnd/xnd/config.status +1023 -0
  24. data/ext/ruby_xnd/xnd/configure +376 -8
  25. data/ext/ruby_xnd/xnd/configure.ac +48 -7
  26. data/ext/ruby_xnd/xnd/doc/xnd/index.rst +3 -1
  27. data/ext/ruby_xnd/xnd/doc/xnd/{types.rst → xnd.rst} +3 -18
  28. data/ext/ruby_xnd/xnd/libxnd/Makefile +142 -0
  29. data/ext/ruby_xnd/xnd/libxnd/Makefile.in +43 -3
  30. data/ext/ruby_xnd/xnd/libxnd/Makefile.vc +19 -3
  31. data/ext/ruby_xnd/xnd/libxnd/bitmaps.c +42 -3
  32. data/ext/ruby_xnd/xnd/libxnd/bitmaps.o +0 -0
  33. data/ext/ruby_xnd/xnd/libxnd/bounds.c +366 -0
  34. data/ext/ruby_xnd/xnd/libxnd/bounds.o +0 -0
  35. data/ext/ruby_xnd/xnd/libxnd/contrib.h +98 -0
  36. data/ext/ruby_xnd/xnd/libxnd/contrib/bfloat16.h +213 -0
  37. data/ext/ruby_xnd/xnd/libxnd/copy.c +155 -4
  38. data/ext/ruby_xnd/xnd/libxnd/copy.o +0 -0
  39. data/ext/ruby_xnd/xnd/libxnd/cuda/cuda_memory.cu +121 -0
  40. data/ext/ruby_xnd/xnd/libxnd/cuda/cuda_memory.h +58 -0
  41. data/ext/ruby_xnd/xnd/libxnd/equal.c +195 -7
  42. data/ext/ruby_xnd/xnd/libxnd/equal.o +0 -0
  43. data/ext/ruby_xnd/xnd/libxnd/inline.h +32 -0
  44. data/ext/ruby_xnd/xnd/libxnd/libxnd.a +0 -0
  45. data/ext/ruby_xnd/xnd/libxnd/libxnd.so +1 -0
  46. data/ext/ruby_xnd/xnd/libxnd/libxnd.so.0 +1 -0
  47. data/ext/ruby_xnd/xnd/libxnd/libxnd.so.0.2.0dev3 +0 -0
  48. data/ext/ruby_xnd/xnd/libxnd/shape.c +207 -0
  49. data/ext/ruby_xnd/xnd/libxnd/shape.o +0 -0
  50. data/ext/ruby_xnd/xnd/libxnd/split.c +2 -2
  51. data/ext/ruby_xnd/xnd/libxnd/split.o +0 -0
  52. data/ext/ruby_xnd/xnd/libxnd/tests/Makefile +39 -0
  53. data/ext/ruby_xnd/xnd/libxnd/xnd.c +613 -91
  54. data/ext/ruby_xnd/xnd/libxnd/xnd.h +145 -4
  55. data/ext/ruby_xnd/xnd/libxnd/xnd.o +0 -0
  56. data/ext/ruby_xnd/xnd/python/test_xnd.py +1125 -50
  57. data/ext/ruby_xnd/xnd/python/xnd/__init__.py +609 -124
  58. data/ext/ruby_xnd/xnd/python/xnd/_version.py +1 -0
  59. data/ext/ruby_xnd/xnd/python/xnd/_xnd.c +1652 -101
  60. data/ext/ruby_xnd/xnd/python/xnd/libxnd.a +0 -0
  61. data/ext/ruby_xnd/xnd/python/xnd/libxnd.so +1 -0
  62. data/ext/ruby_xnd/xnd/python/xnd/libxnd.so.0 +1 -0
  63. data/ext/ruby_xnd/xnd/python/xnd/libxnd.so.0.2.0dev3 +0 -0
  64. data/ext/ruby_xnd/xnd/python/xnd/pyxnd.h +1 -1
  65. data/ext/ruby_xnd/xnd/python/xnd/util.h +25 -0
  66. data/ext/ruby_xnd/xnd/python/xnd/xnd.h +590 -0
  67. data/ext/ruby_xnd/xnd/python/xnd_randvalue.py +106 -6
  68. data/ext/ruby_xnd/xnd/python/xnd_support.py +4 -0
  69. data/ext/ruby_xnd/xnd/setup.py +46 -4
  70. data/lib/ruby_xnd.so +0 -0
  71. data/lib/xnd.rb +39 -3
  72. data/lib/xnd/version.rb +2 -2
  73. data/xnd.gemspec +2 -1
  74. metadata +58 -5
@@ -0,0 +1 @@
1
+ ext/ruby_xnd/xnd/python/xnd/libxnd.so.0.2.0dev3
@@ -0,0 +1 @@
1
+ ext/ruby_xnd/xnd/python/xnd/libxnd.so.0.2.0dev3
@@ -108,7 +108,7 @@ typedef struct {
108
108
 
109
109
  #define Xnd_EmptyFromType_INDEX 3
110
110
  #define Xnd_EmptyFromType_RETURN PyObject *
111
- #define Xnd_EmptyFromType_ARGS (PyTypeObject *, ndt_t *t)
111
+ #define Xnd_EmptyFromType_ARGS (PyTypeObject *, const ndt_t *t, uint32_t flags)
112
112
 
113
113
  #define Xnd_ViewMoveNdt_INDEX 4
114
114
  #define Xnd_ViewMoveNdt_RETURN PyObject *
@@ -36,6 +36,7 @@
36
36
 
37
37
 
38
38
  #include <Python.h>
39
+ #include <longintrepr.h>
39
40
  #include <stdlib.h>
40
41
  #include <stdint.h>
41
42
  #include <inttypes.h>
@@ -178,5 +179,29 @@ pyslice_unpack(PyObject *_r,
178
179
  return 0;
179
180
  }
180
181
 
182
+ /* longobject.c */
183
+ static inline int
184
+ long_compare(PyLongObject *a, PyLongObject *b)
185
+ {
186
+ Py_ssize_t sign;
187
+
188
+ if (Py_SIZE(a) != Py_SIZE(b)) {
189
+ sign = Py_SIZE(a) - Py_SIZE(b);
190
+ }
191
+ else {
192
+ Py_ssize_t i = Py_ABS(Py_SIZE(a));
193
+ while (--i >= 0 && a->ob_digit[i] == b->ob_digit[i])
194
+ ;
195
+ if (i < 0)
196
+ sign = 0;
197
+ else {
198
+ sign = (sdigit)a->ob_digit[i] - (sdigit)b->ob_digit[i];
199
+ if (Py_SIZE(a) < 0)
200
+ sign = -sign;
201
+ }
202
+ }
203
+ return sign < 0 ? -1 : sign > 0 ? 1 : 0;
204
+ }
205
+
181
206
 
182
207
  #endif /* UTIL_H */
@@ -0,0 +1,590 @@
1
+ /*
2
+ * BSD 3-Clause License
3
+ *
4
+ * Copyright (c) 2017-2018, plures
5
+ * All rights reserved.
6
+ *
7
+ * Redistribution and use in source and binary forms, with or without
8
+ * modification, are permitted provided that the following conditions are met:
9
+ *
10
+ * 1. Redistributions of source code must retain the above copyright notice,
11
+ * this list of conditions and the following disclaimer.
12
+ *
13
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
14
+ * this list of conditions and the following disclaimer in the documentation
15
+ * and/or other materials provided with the distribution.
16
+ *
17
+ * 3. Neither the name of the copyright holder nor the names of its
18
+ * contributors may be used to endorse or promote products derived from
19
+ * this software without specific prior written permission.
20
+ *
21
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
22
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
23
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
24
+ * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
25
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
26
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
27
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
28
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
29
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
30
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31
+ */
32
+
33
+
34
+ #ifndef XND_H
35
+ #define XND_H
36
+
37
+
38
+ #ifdef __cplusplus
39
+ extern "C" {
40
+ #endif
41
+
42
+ #include <stdlib.h>
43
+ #include <stdint.h>
44
+ #include <string.h>
45
+ #include <assert.h>
46
+ #include "ndtypes.h"
47
+
48
+ #ifdef __cplusplus
49
+ #include <cstdint>
50
+ #else
51
+ #include <stdint.h>
52
+ #include <inttypes.h>
53
+ #endif
54
+
55
+
56
+ #ifdef _MSC_VER
57
+ #if defined (XND_EXPORT)
58
+ #define XND_API __declspec(dllexport)
59
+ #elif defined(XND_IMPORT)
60
+ #define XND_API __declspec(dllimport)
61
+ #else
62
+ #define XND_API
63
+ #endif
64
+
65
+ #include "malloc.h"
66
+ #define ALLOCA(type, name, nmemb) type *name = _alloca(nmemb * sizeof(type))
67
+ #else
68
+ #define XND_API
69
+
70
+ #define ALLOCA(type, name, nmemb) type name[nmemb]
71
+ #endif
72
+
73
+
74
+ #if SIZE_MAX == UINT64_MAX
75
+ #define XND_SSIZE_MAX INT64_MAX
76
+ #elif SIZE_MAX == UINT32_MAX
77
+ #define XND_SSIZE_MAX INT32_MAX
78
+ #else
79
+ #error "unsupported platform: need 32-bit or 64-bit size_t"
80
+ #endif
81
+
82
+
83
+ /*
84
+ * Ownership flags: The library itself has no notion of how many exported
85
+ * views a master buffer has. The Python bindings for example use Python's
86
+ * reference counting to keep track of exported memory blocks.
87
+ */
88
+ #define XND_OWN_TYPE 0x00000001U /* type pointer */
89
+ #define XND_OWN_DATA 0x00000002U /* data pointer */
90
+ #define XND_OWN_STRINGS 0x00000004U /* embedded string pointers */
91
+ #define XND_OWN_BYTES 0x00000008U /* embedded bytes pointers */
92
+ #define XND_OWN_ARRAYS 0x00000010U /* embedded array pointers */
93
+ #define XND_OWN_POINTERS 0x00000020U /* embedded pointers */
94
+ #define XND_CUDA_MANAGED 0x00000040U /* cuda managed memory */
95
+
96
+ #define XND_OWN_ALL (XND_OWN_TYPE | \
97
+ XND_OWN_DATA | \
98
+ XND_OWN_STRINGS | \
99
+ XND_OWN_BYTES | \
100
+ XND_OWN_ARRAYS | \
101
+ XND_OWN_POINTERS)
102
+
103
+ #define XND_OWN_EMBEDDED (XND_OWN_DATA | \
104
+ XND_OWN_STRINGS | \
105
+ XND_OWN_BYTES | \
106
+ XND_OWN_ARRAYS | \
107
+ XND_OWN_POINTERS)
108
+
109
+
110
+ /* Convenience macros to extract embedded values. */
111
+ #define XND_POINTER_DATA(ptr) (*((char **)ptr))
112
+ #define XND_STRING_DATA(ptr) ((*((const char **)ptr)) == NULL ? "" : (*((const char **)ptr)))
113
+ #define XND_BYTES_SIZE(ptr) (((ndt_bytes_t *)ptr)->size)
114
+ #define XND_BYTES_DATA(ptr) (((ndt_bytes_t *)ptr)->data)
115
+ #define XND_ARRAY_SHAPE(ptr) (((ndt_array_t *)ptr)->shape)
116
+ #define XND_ARRAY_DATA(ptr) (((ndt_array_t *)ptr)->data)
117
+ #define XND_UNION_TAG(ptr) (*((uint8_t *)ptr))
118
+
119
+
120
+ /* Bitmap tree. */
121
+ typedef struct xnd_bitmap xnd_bitmap_t;
122
+
123
+ struct xnd_bitmap {
124
+ uint8_t *data; /* bitmap */
125
+ int64_t size; /* number of subtree bitmaps in the "next" array */
126
+ xnd_bitmap_t *next; /* array of bitmaps for subtrees */
127
+ };
128
+
129
+ /* Typed memory block, usually a view. */
130
+ typedef struct xnd {
131
+ xnd_bitmap_t bitmap; /* bitmap tree */
132
+ int64_t index; /* linear index for var dims */
133
+ const ndt_t *type; /* type of the data */
134
+ char *ptr; /* data */
135
+ } xnd_t;
136
+
137
+ /* Master memory block. */
138
+ typedef struct xnd_master {
139
+ uint32_t flags; /* ownership flags */
140
+ xnd_t master; /* typed memory */
141
+ } xnd_master_t;
142
+
143
+ /* Used in indexing and slicing. */
144
+ enum xnd_key { Index, FieldName, Slice };
145
+ typedef struct {
146
+ enum xnd_key tag;
147
+ union {
148
+ int64_t Index;
149
+ const char *FieldName;
150
+ ndt_slice_t Slice;
151
+ };
152
+ } xnd_index_t;
153
+
154
+
155
+ /* Unstable API: view with ownership tracking. */
156
+ typedef struct xnd_view {
157
+ uint32_t flags; /* flags that indicate resource ownership by the view */
158
+ const void *obj; /* object that holds shared resources */
159
+ xnd_t view; /* typed memory */
160
+ } xnd_view_t;
161
+
162
+
163
+ /*****************************************************************************/
164
+ /* Create xnd memory blocks */
165
+ /*****************************************************************************/
166
+
167
+ XND_API xnd_master_t *xnd_empty_from_string(const char *s, uint32_t flags, ndt_context_t *ctx);
168
+ XND_API xnd_master_t *xnd_empty_from_type(const ndt_t *t, uint32_t flags, ndt_context_t *ctx);
169
+ XND_API void xnd_clear(xnd_t * const x, const uint32_t flags);
170
+ XND_API void xnd_del(xnd_master_t *x);
171
+
172
+ /* Create and delete pristine xnd_t buffers. */
173
+ XND_API xnd_master_t *xnd_from_xnd(xnd_t *src, uint32_t flags, ndt_context_t *ctx);
174
+ XND_API void xnd_del_buffer(xnd_t *x, uint32_t flags);
175
+
176
+
177
+ /*****************************************************************************/
178
+ /* Traverse xnd memory blocks */
179
+ /*****************************************************************************/
180
+
181
+ XND_API bool have_stored_index(const ndt_t *t);
182
+ XND_API int64_t get_stored_index(const ndt_t *t);
183
+ XND_API xnd_t apply_stored_index(const xnd_t *x, ndt_context_t *ctx);
184
+ XND_API xnd_t apply_stored_indices(const xnd_t *x, ndt_context_t *ctx);
185
+
186
+ XND_API xnd_t xnd_subtree_index(const xnd_t *x, const int64_t *indices, int len,
187
+ ndt_context_t *ctx);
188
+
189
+ XND_API xnd_t xnd_subtree(const xnd_t *x, const xnd_index_t indices[], int len,
190
+ ndt_context_t *ctx);
191
+
192
+ XND_API xnd_t xnd_subscript(const xnd_t *x, const xnd_index_t indices[], int len,
193
+ ndt_context_t *ctx);
194
+
195
+ XND_API xnd_t xnd_reshape(const xnd_t *x, int64_t shape[], int ndim, char order, ndt_context_t *ctx);
196
+
197
+ XND_API xnd_t *xnd_split(const xnd_t *x, int64_t *n, int max_outer, ndt_context_t *ctx);
198
+
199
+ XND_API int xnd_equal(const xnd_t *x, const xnd_t *y, ndt_context_t *ctx);
200
+ XND_API int xnd_strict_equal(const xnd_t *x, const xnd_t *y, ndt_context_t *ctx);
201
+
202
+ XND_API int xnd_copy(xnd_t *y, const xnd_t *x, uint32_t flags, ndt_context_t *ctx);
203
+
204
+
205
+ /*****************************************************************************/
206
+ /* Bounds checking */
207
+ /*****************************************************************************/
208
+
209
+ XND_API int xnd_bounds_check(const ndt_t *t, const int64_t linear_index,
210
+ const int64_t bufsize, ndt_context_t *ctx);
211
+
212
+
213
+ /*****************************************************************************/
214
+ /* Bitmaps */
215
+ /*****************************************************************************/
216
+
217
+ XND_API int xnd_bitmap_init(xnd_bitmap_t *b, const ndt_t *t, ndt_context_t *ctx);
218
+ XND_API void xnd_bitmap_clear(xnd_bitmap_t *b);
219
+ XND_API xnd_bitmap_t xnd_bitmap_next(const xnd_t *x, int64_t i, ndt_context_t *ctx);
220
+ XND_API void xnd_set_valid(xnd_t *x);
221
+ XND_API void xnd_set_na(xnd_t *x);
222
+ XND_API int xnd_is_valid(const xnd_t *x);
223
+ XND_API int xnd_is_na(const xnd_t *x);
224
+
225
+
226
+ /*****************************************************************************/
227
+ /* Error handling */
228
+ /*****************************************************************************/
229
+
230
+ XND_API extern const xnd_t xnd_error;
231
+ XND_API extern const xnd_bitmap_t xnd_bitmap_empty;
232
+
233
+ XND_API int xnd_err_occurred(const xnd_t *x);
234
+
235
+
236
+ /*****************************************************************************/
237
+ /* Unstable API */
238
+ /*****************************************************************************/
239
+
240
+ XND_API extern const xnd_view_t xnd_view_error;
241
+
242
+ XND_API int xnd_view_err_occurred(const xnd_view_t *x);
243
+ XND_API void xnd_view_clear(xnd_view_t *x);
244
+ XND_API xnd_view_t xnd_view_from_xnd(const void *obj, const xnd_t *x);
245
+ XND_API xnd_view_t xnd_view_subscript(const xnd_view_t *x, const xnd_index_t indices[],
246
+ int len, ndt_context_t *ctx);
247
+
248
+
249
+
250
+ /*****************************************************************************/
251
+ /* Float format */
252
+ /*****************************************************************************/
253
+
254
+ XND_API int xnd_init_float(ndt_context_t *ctx);
255
+ XND_API bool xnd_float_is_little_endian(void);
256
+ XND_API bool xnd_float_is_big_endian(void);
257
+ XND_API bool xnd_double_is_little_endian(void);
258
+ XND_API bool xnd_double_is_big_endian(void);
259
+
260
+
261
+ /*****************************************************************************/
262
+ /* BFloat16 */
263
+ /*****************************************************************************/
264
+
265
+ XND_API void xnd_bfloat_pack(char *p, double x);
266
+ XND_API double xnd_bfloat_unpack(char *p);
267
+
268
+
269
+ /*****************************************************************************/
270
+ /* Cuda */
271
+ /*****************************************************************************/
272
+
273
+ void *xnd_cuda_calloc_managed(uint16_t align, int64_t size, ndt_context_t *ctx);
274
+ void xnd_cuda_free(void *ptr);
275
+ int xnd_cuda_mem_prefetch_async(const void *ptr, int64_t count, int dev, ndt_context_t *ctx);
276
+ int xnd_cuda_device_synchronize(ndt_context_t *ctx);
277
+
278
+
279
+ /*****************************************************************************/
280
+ /* Static inline functions */
281
+ /*****************************************************************************/
282
+
283
+ /* Check index bounds and adjust negative indices. */
284
+ static inline int64_t
285
+ adjust_index(const int64_t i, const int64_t shape, ndt_context_t *ctx)
286
+ {
287
+ const int64_t k = i < 0 ? i + shape : i;
288
+
289
+ if (k < 0 || k >= shape || k > XND_SSIZE_MAX) {
290
+ ndt_err_format(ctx, NDT_IndexError,
291
+ "index with value %" PRIi64 " out of bounds", i);
292
+ return -1;
293
+ }
294
+
295
+ return k;
296
+ }
297
+
298
+ /*
299
+ * This looks inefficient, but both gcc and clang clean up unused xnd_t members.
300
+ */
301
+ static inline int64_t
302
+ xnd_ndim(const xnd_t *x)
303
+ {
304
+ return x->type->ndim;
305
+ }
306
+
307
+ static inline xnd_t
308
+ xnd_fixed_dim_next(const xnd_t *x, const int64_t i)
309
+ {
310
+ const ndt_t *t = x->type;
311
+ const ndt_t *u = t->FixedDim.type;
312
+ const int64_t step = i * t->Concrete.FixedDim.step;
313
+ xnd_t next;
314
+
315
+ assert(t->tag == FixedDim);
316
+
317
+ next.bitmap = x->bitmap;
318
+ next.index = x->index + step;
319
+ next.type = u;
320
+ next.ptr = u->ndim==0 ? x->ptr + next.index * next.type->datasize : x->ptr;
321
+
322
+ return next;
323
+ }
324
+
325
+ static inline int64_t
326
+ xnd_fixed_shape(const xnd_t *x)
327
+ {
328
+ const ndt_t *t = x->type;
329
+ assert(t->tag == FixedDim);
330
+ return t->FixedDim.shape;
331
+ }
332
+
333
+ static inline int64_t
334
+ xnd_fixed_step(const xnd_t *x)
335
+ {
336
+ const ndt_t *t = x->type;
337
+ assert(t->tag == FixedDim);
338
+ return t->Concrete.FixedDim.step;
339
+ }
340
+
341
+ static inline int64_t
342
+ xnd_fixed_shape_at(const xnd_t *x, const int i)
343
+ {
344
+ const ndt_t *t = x->type;
345
+
346
+ assert(0 <= i && i < t->ndim);
347
+ assert(t->tag == FixedDim);
348
+
349
+ for (int k = 0; k < i; k++) {
350
+ t = t->FixedDim.type;
351
+ }
352
+ return t->FixedDim.shape;
353
+ }
354
+
355
+ static inline int64_t
356
+ xnd_fixed_stride(const xnd_t *x)
357
+ {
358
+ const ndt_t *t = x->type;
359
+ assert(t->tag == FixedDim);
360
+ return t->Concrete.FixedDim.step * t->Concrete.FixedDim.itemsize;
361
+ }
362
+
363
+ static inline char *
364
+ xnd_fixed_apply_index(const xnd_t *x)
365
+ {
366
+ assert(x->type->tag == FixedDim);
367
+ return x->ptr + x->index * x->type->Concrete.FixedDim.itemsize;
368
+ }
369
+
370
+ static inline xnd_t
371
+ xnd_var_dim_next(const xnd_t *x, const int64_t start, const int64_t step,
372
+ const int64_t i)
373
+ {
374
+ const ndt_t *t = x->type;
375
+ const ndt_t *u = t->VarDim.type;
376
+ xnd_t next;
377
+
378
+ next.bitmap = x->bitmap;
379
+ next.index = start + i * step;
380
+ next.type = u;
381
+ next.ptr = u->ndim==0 ? x->ptr + next.index * next.type->datasize : x->ptr;
382
+
383
+ return next;
384
+ }
385
+
386
+ static inline xnd_t
387
+ xnd_tuple_next(const xnd_t *x, const int64_t i, ndt_context_t *ctx)
388
+ {
389
+ const ndt_t *t = x->type;
390
+ xnd_t next;
391
+
392
+ next.bitmap = xnd_bitmap_next(x, i, ctx);
393
+ if (ndt_err_occurred(ctx)) {
394
+ return xnd_error;
395
+ }
396
+
397
+ next.index = 0;
398
+ next.type = t->Tuple.types[i];
399
+ next.ptr = x->ptr + t->Concrete.Tuple.offset[i];
400
+
401
+ return next;
402
+ }
403
+
404
+ static inline xnd_t
405
+ xnd_record_next(const xnd_t *x, const int64_t i, ndt_context_t *ctx)
406
+ {
407
+ const ndt_t *t = x->type;
408
+ xnd_t next;
409
+
410
+ next.bitmap = xnd_bitmap_next(x, i, ctx);
411
+ if (ndt_err_occurred(ctx)) {
412
+ return xnd_error;
413
+ }
414
+
415
+ next.index = 0;
416
+ next.type = t->Record.types[i];
417
+ next.ptr = x->ptr + t->Concrete.Record.offset[i];
418
+
419
+ return next;
420
+ }
421
+
422
+ static inline xnd_t
423
+ xnd_union_next(const xnd_t *x, ndt_context_t *ctx)
424
+ {
425
+ uint8_t i = XND_UNION_TAG(x->ptr);
426
+ const ndt_t *t = x->type;
427
+ xnd_t next;
428
+
429
+ next.bitmap = xnd_bitmap_next(x, i, ctx);
430
+ if (ndt_err_occurred(ctx)) {
431
+ return xnd_error;
432
+ }
433
+
434
+ next.index = 0;
435
+ next.type = t->Union.types[i];
436
+ next.ptr = x->ptr+1;
437
+
438
+ return next;
439
+ }
440
+
441
+ static inline xnd_t
442
+ xnd_ref_next(const xnd_t *x, ndt_context_t *ctx)
443
+ {
444
+ const ndt_t *t = x->type;
445
+ xnd_t next;
446
+
447
+ next.bitmap = xnd_bitmap_next(x, 0, ctx);
448
+ if (ndt_err_occurred(ctx)) {
449
+ return xnd_error;
450
+ }
451
+
452
+ next.index = 0;
453
+ next.type = t->Ref.type;
454
+ next.ptr = XND_POINTER_DATA(x->ptr);
455
+
456
+ return next;
457
+ }
458
+
459
+ static inline xnd_t
460
+ xnd_constr_next(const xnd_t *x, ndt_context_t *ctx)
461
+ {
462
+ const ndt_t *t = x->type;
463
+ xnd_t next;
464
+
465
+ next.bitmap = xnd_bitmap_next(x, 0, ctx);
466
+ if (ndt_err_occurred(ctx)) {
467
+ return xnd_error;
468
+ }
469
+
470
+ next.index = 0;
471
+ next.type = t->Constr.type;
472
+ next.ptr = x->ptr;
473
+
474
+ return next;
475
+ }
476
+
477
+ static inline xnd_t
478
+ xnd_nominal_next(const xnd_t *x, ndt_context_t *ctx)
479
+ {
480
+ const ndt_t *t = x->type;
481
+ xnd_t next;
482
+
483
+ next.bitmap = xnd_bitmap_next(x, 0, ctx);
484
+ if (ndt_err_occurred(ctx)) {
485
+ return xnd_error;
486
+ }
487
+
488
+ next.index = 0;
489
+ next.type = t->Nominal.type;
490
+ next.ptr = x->ptr;
491
+
492
+ return next;
493
+ }
494
+
495
+ static inline xnd_t
496
+ xnd_array_next(const xnd_t *x, const int64_t i)
497
+ {
498
+ const ndt_t *t = x->type;
499
+ const ndt_t *u = t->Array.type;
500
+ xnd_t next;
501
+
502
+ assert(t->tag == Array);
503
+
504
+ next.bitmap = x->bitmap;
505
+ next.index = 0;
506
+ next.type = u;
507
+ next.ptr = XND_ARRAY_DATA(x->ptr) + i * next.type->datasize;
508
+
509
+ return next;
510
+ }
511
+
512
+ #if NDT_SYS_BIG_ENDIAN == 1
513
+ #define XND_REV_COND NDT_LITTLE_ENDIAN
514
+ #else
515
+ #define XND_REV_COND NDT_BIG_ENDIAN
516
+ #endif
517
+
518
+ static inline void
519
+ memcpy_rev(char *dest, const char *src, size_t size)
520
+ {
521
+ size_t i;
522
+
523
+ for (i = 0; i < size; i++) {
524
+ dest[i] = src[size-1-i];
525
+ }
526
+ }
527
+
528
+ static inline void
529
+ bcopy_swap(char *dest, const char *src, size_t size, uint32_t flags)
530
+ {
531
+ if (flags & XND_REV_COND) {
532
+ memcpy_rev(dest, src, size);
533
+ }
534
+ else {
535
+ memcpy(dest, src, size);
536
+ }
537
+ }
538
+
539
+ static inline int
540
+ le(uint32_t flags)
541
+ {
542
+ #if NDT_SYS_BIG_ENDIAN == 1
543
+ return flags & NDT_LITTLE_ENDIAN;
544
+ #else
545
+ return !(flags & NDT_BIG_ENDIAN);
546
+ #endif
547
+ }
548
+
549
+
550
+ #define PACK_SINGLE(ptr, src, type, flags) \
551
+ do { \
552
+ type _x; \
553
+ _x = (type)src; \
554
+ bcopy_swap(ptr, (const char *)&_x, sizeof _x, flags); \
555
+ } while (0)
556
+
557
+ #define UNPACK_SINGLE(dest, ptr, type, flags) \
558
+ do { \
559
+ type _x; \
560
+ bcopy_swap((char *)&_x, ptr, sizeof _x, flags); \
561
+ dest = _x; \
562
+ } while (0)
563
+
564
+ #define APPLY_STORED_INDICES_INT(x) \
565
+ xnd_t _##x##tail; \
566
+ if (have_stored_index(x->type)) { \
567
+ _##x##tail = apply_stored_indices(x, ctx); \
568
+ if (xnd_err_occurred(&_##x##tail)) { \
569
+ return -1; \
570
+ } \
571
+ x = &_##x##tail; \
572
+ }
573
+
574
+ #define APPLY_STORED_INDICES_XND(x) \
575
+ xnd_t _##x##tail; \
576
+ if (have_stored_index(x->type)) { \
577
+ _##x##tail = apply_stored_indices(x, ctx); \
578
+ if (xnd_err_occurred(&_##x##tail)) { \
579
+ return xnd_error; \
580
+ } \
581
+ x = &_##x##tail; \
582
+ }
583
+
584
+
585
+ #ifdef __cplusplus
586
+ } /* END extern "C" */
587
+ #endif
588
+
589
+
590
+ #endif /* XND_H */