xnd 0.2.0dev3

Files changed (93)
  1. checksums.yaml +7 -0
  2. data/CONTRIBUTING.md +42 -0
  3. data/Gemfile +3 -0
  4. data/History.md +0 -0
  5. data/README.md +7 -0
  6. data/Rakefile +135 -0
  7. data/ext/ruby_xnd/extconf.rb +70 -0
  8. data/ext/ruby_xnd/float_pack_unpack.c +277 -0
  9. data/ext/ruby_xnd/float_pack_unpack.h +39 -0
  10. data/ext/ruby_xnd/gc_guard.c +36 -0
  11. data/ext/ruby_xnd/gc_guard.h +12 -0
  12. data/ext/ruby_xnd/include/xnd.h +449 -0
  13. data/ext/ruby_xnd/lib/libxnd.a +0 -0
  14. data/ext/ruby_xnd/lib/libxnd.so +1 -0
  15. data/ext/ruby_xnd/lib/libxnd.so.0 +1 -0
  16. data/ext/ruby_xnd/lib/libxnd.so.0.2.0dev3 +0 -0
  17. data/ext/ruby_xnd/memory_block_object.c +32 -0
  18. data/ext/ruby_xnd/memory_block_object.h +33 -0
  19. data/ext/ruby_xnd/ruby_xnd.c +1953 -0
  20. data/ext/ruby_xnd/ruby_xnd.h +61 -0
  21. data/ext/ruby_xnd/ruby_xnd_internal.h +85 -0
  22. data/ext/ruby_xnd/util.h +170 -0
  23. data/ext/ruby_xnd/xnd/AUTHORS.txt +5 -0
  24. data/ext/ruby_xnd/xnd/INSTALL.txt +134 -0
  25. data/ext/ruby_xnd/xnd/LICENSE.txt +29 -0
  26. data/ext/ruby_xnd/xnd/MANIFEST.in +3 -0
  27. data/ext/ruby_xnd/xnd/Makefile.in +80 -0
  28. data/ext/ruby_xnd/xnd/README.rst +44 -0
  29. data/ext/ruby_xnd/xnd/config.guess +1530 -0
  30. data/ext/ruby_xnd/xnd/config.h.in +22 -0
  31. data/ext/ruby_xnd/xnd/config.sub +1782 -0
  32. data/ext/ruby_xnd/xnd/configure +4867 -0
  33. data/ext/ruby_xnd/xnd/configure.ac +164 -0
  34. data/ext/ruby_xnd/xnd/doc/Makefile +14 -0
  35. data/ext/ruby_xnd/xnd/doc/_static/copybutton.js +66 -0
  36. data/ext/ruby_xnd/xnd/doc/conf.py +26 -0
  37. data/ext/ruby_xnd/xnd/doc/index.rst +44 -0
  38. data/ext/ruby_xnd/xnd/doc/libxnd/data-structures.rst +186 -0
  39. data/ext/ruby_xnd/xnd/doc/libxnd/functions.rst +148 -0
  40. data/ext/ruby_xnd/xnd/doc/libxnd/index.rst +25 -0
  41. data/ext/ruby_xnd/xnd/doc/releases/index.rst +34 -0
  42. data/ext/ruby_xnd/xnd/doc/xnd/align-pack.rst +96 -0
  43. data/ext/ruby_xnd/xnd/doc/xnd/buffer-protocol.rst +42 -0
  44. data/ext/ruby_xnd/xnd/doc/xnd/index.rst +30 -0
  45. data/ext/ruby_xnd/xnd/doc/xnd/quickstart.rst +62 -0
  46. data/ext/ruby_xnd/xnd/doc/xnd/types.rst +674 -0
  47. data/ext/ruby_xnd/xnd/install-sh +527 -0
  48. data/ext/ruby_xnd/xnd/libxnd/Makefile.in +102 -0
  49. data/ext/ruby_xnd/xnd/libxnd/Makefile.vc +112 -0
  50. data/ext/ruby_xnd/xnd/libxnd/bitmaps.c +345 -0
  51. data/ext/ruby_xnd/xnd/libxnd/contrib.h +313 -0
  52. data/ext/ruby_xnd/xnd/libxnd/copy.c +944 -0
  53. data/ext/ruby_xnd/xnd/libxnd/equal.c +1216 -0
  54. data/ext/ruby_xnd/xnd/libxnd/inline.h +154 -0
  55. data/ext/ruby_xnd/xnd/libxnd/overflow.h +147 -0
  56. data/ext/ruby_xnd/xnd/libxnd/split.c +286 -0
  57. data/ext/ruby_xnd/xnd/libxnd/tests/Makefile.in +39 -0
  58. data/ext/ruby_xnd/xnd/libxnd/tests/Makefile.vc +44 -0
  59. data/ext/ruby_xnd/xnd/libxnd/tests/README.txt +2 -0
  60. data/ext/ruby_xnd/xnd/libxnd/tests/runtest.c +101 -0
  61. data/ext/ruby_xnd/xnd/libxnd/tests/test.h +48 -0
  62. data/ext/ruby_xnd/xnd/libxnd/tests/test_fixed.c +108 -0
  63. data/ext/ruby_xnd/xnd/libxnd/xnd.c +1304 -0
  64. data/ext/ruby_xnd/xnd/libxnd/xnd.h +449 -0
  65. data/ext/ruby_xnd/xnd/python/test_xnd.py +3144 -0
  66. data/ext/ruby_xnd/xnd/python/xnd/__init__.py +290 -0
  67. data/ext/ruby_xnd/xnd/python/xnd/_xnd.c +2822 -0
  68. data/ext/ruby_xnd/xnd/python/xnd/contrib/pretty.py +850 -0
  69. data/ext/ruby_xnd/xnd/python/xnd/docstrings.h +129 -0
  70. data/ext/ruby_xnd/xnd/python/xnd/pyxnd.h +200 -0
  71. data/ext/ruby_xnd/xnd/python/xnd/util.h +182 -0
  72. data/ext/ruby_xnd/xnd/python/xnd_randvalue.py +1121 -0
  73. data/ext/ruby_xnd/xnd/python/xnd_support.py +106 -0
  74. data/ext/ruby_xnd/xnd/setup.py +303 -0
  75. data/ext/ruby_xnd/xnd/vcbuild/INSTALL.txt +42 -0
  76. data/ext/ruby_xnd/xnd/vcbuild/runtest32.bat +16 -0
  77. data/ext/ruby_xnd/xnd/vcbuild/runtest64.bat +14 -0
  78. data/ext/ruby_xnd/xnd/vcbuild/vcbuild32.bat +29 -0
  79. data/ext/ruby_xnd/xnd/vcbuild/vcbuild64.bat +29 -0
  80. data/ext/ruby_xnd/xnd/vcbuild/vcclean.bat +13 -0
  81. data/ext/ruby_xnd/xnd/vcbuild/vcdistclean.bat +14 -0
  82. data/lib/ruby_xnd.so +0 -0
  83. data/lib/xnd.rb +306 -0
  84. data/lib/xnd/monkeys.rb +29 -0
  85. data/lib/xnd/version.rb +6 -0
  86. data/spec/debug_spec.rb +9 -0
  87. data/spec/gc_guard_spec.rb +10 -0
  88. data/spec/leakcheck.rb +9 -0
  89. data/spec/spec_helper.rb +877 -0
  90. data/spec/type_inference_spec.rb +81 -0
  91. data/spec/xnd_spec.rb +2921 -0
  92. data/xnd.gemspec +47 -0
  93. metadata +215 -0
Binary file
data/ext/ruby_xnd/lib/libxnd.so
@@ -0,0 +1 @@
1
+ ext/ruby_xnd/lib/libxnd.so.0.2.0dev3
data/ext/ruby_xnd/lib/libxnd.so.0
@@ -0,0 +1 @@
1
+ ext/ruby_xnd/lib/libxnd.so.0.2.0dev3
data/ext/ruby_xnd/memory_block_object.c
@@ -0,0 +1,32 @@
1
+ /* BSD 3-Clause License
2
+ *
3
+ * Copyright (c) 2018, Quansight and Sameer Deshmukh
4
+ * All rights reserved.
5
+ *
6
+ * Redistribution and use in source and binary forms, with or without
7
+ * modification, are permitted provided that the following conditions are met:
8
+ *
9
+ * * Redistributions of source code must retain the above copyright notice, this
10
+ * list of conditions and the following disclaimer.
11
+ *
12
+ * * Redistributions in binary form must reproduce the above copyright notice,
13
+ * this list of conditions and the following disclaimer in the documentation
14
+ * and/or other materials provided with the distribution.
15
+ *
16
+ * * Neither the name of the copyright holder nor the names of its
17
+ * contributors may be used to endorse or promote products derived from
18
+ * this software without specific prior written permission.
19
+ *
20
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23
+ * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30
+ */
31
+
32
+ /* File for various operations on the MemoryBlockObject struct. */
data/ext/ruby_xnd/memory_block_object.h
@@ -0,0 +1,33 @@
1
+ /* BSD 3-Clause License
2
+ *
3
+ * Copyright (c) 2018, Quansight and Sameer Deshmukh
4
+ * All rights reserved.
5
+ *
6
+ * Redistribution and use in source and binary forms, with or without
7
+ * modification, are permitted provided that the following conditions are met:
8
+ *
9
+ * * Redistributions of source code must retain the above copyright notice, this
10
+ * list of conditions and the following disclaimer.
11
+ *
12
+ * * Redistributions in binary form must reproduce the above copyright notice,
13
+ * this list of conditions and the following disclaimer in the documentation
14
+ * and/or other materials provided with the distribution.
15
+ *
16
+ * * Neither the name of the copyright holder nor the names of its
17
+ * contributors may be used to endorse or promote products derived from
18
+ * this software without specific prior written permission.
19
+ *
20
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23
+ * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30
+ */
31
+
32
+ /* Headers for MemoryBlockObject */
33
+
data/ext/ruby_xnd/ruby_xnd.c
@@ -0,0 +1,1953 @@
1
+ /* BSD 3-Clause License
2
+ *
3
+ * Copyright (c) 2018, Quansight and Sameer Deshmukh
4
+ * All rights reserved.
5
+ *
6
+ * Redistribution and use in source and binary forms, with or without
7
+ * modification, are permitted provided that the following conditions are met:
8
+ *
9
+ * * Redistributions of source code must retain the above copyright notice, this
10
+ * list of conditions and the following disclaimer.
11
+ *
12
+ * * Redistributions in binary form must reproduce the above copyright notice,
13
+ * this list of conditions and the following disclaimer in the documentation
14
+ * and/or other materials provided with the distribution.
15
+ *
16
+ * * Neither the name of the copyright holder nor the names of its
17
+ * contributors may be used to endorse or promote products derived from
18
+ * this software without specific prior written permission.
19
+ *
20
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23
+ * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30
+ */
31
+
32
+ /* File containing most of the implementation of the Ruby XND wrapper.
33
+ *
34
+ * Author: Sameer Deshmukh (@v0dro)
35
+ */
36
+
37
+ #include "ruby_xnd_internal.h"
38
+ #include "xnd.h"
39
+
40
+ VALUE cRubyXND;
41
+ VALUE cXND;
42
+ static VALUE cRubyXND_MBlock;
43
+ static VALUE cRubyXND_Ellipsis;
44
+ static const rb_data_type_t MemoryBlockObject_type;
45
+ static const rb_data_type_t XndObject_type;
46
+
47
+ static VALUE rb_eValueError;
48
+
49
+ VALUE mRubyXND_GCGuard;
50
+
51
+ /****************************************************************************/
52
+ /* Error handling */
53
+ /****************************************************************************/
54
+ static VALUE
55
+ seterr(ndt_context_t *ctx)
56
+ {
57
+ return rb_ndtypes_set_error(ctx);
58
+ }
59
+
60
+ #ifdef XND_DEBUG
61
+ void
62
+ obj_inspect(const char* msg, VALUE obj)
63
+ {
64
+ VALUE insp = rb_funcall(obj, rb_intern("inspect"), 0, NULL);
65
+ printf("%s %s.\n", msg, StringValuePtr(insp));
66
+ }
67
+ #endif
68
+
69
+ /****************************************************************************/
70
+ /* Singletons */
71
+ /****************************************************************************/
72
+ static VALUE
73
+ xnd_ellipsis(void)
74
+ {
75
+ return rb_funcall(cRubyXND_Ellipsis, rb_intern("initialize"), 0, NULL);
76
+ }
77
+
78
+ /****************************************************************************/
79
+ /* MemoryBlock Object */
80
+ /****************************************************************************/
81
+
82
+ /* The MemoryBlockObject is shared among several XND views/objects. */
83
+ typedef struct MemoryBlockObject {
84
+ VALUE type; /* type owner (ndtype) */
85
+ xnd_master_t *xnd; /* memblock owner */
86
+ } MemoryBlockObject;
87
+
88
+ #define GET_MBLOCK(obj, mblock_p) do { \
89
+ TypedData_Get_Struct((obj), MemoryBlockObject, \
90
+ &MemoryBlockObject_type, (mblock_p)); \
91
+ } while (0)
92
+ #define MAKE_MBLOCK(self, mblock_p) TypedData_Make_Struct(self, MemoryBlockObject, \
93
+ &MemoryBlockObject_type, mblock_p)
94
+ #define WRAP_MBLOCK(self, mblock_p) TypedData_Wrap_Struct(self, \
95
+ &MemoryBlockObject_type, mblock_p)
96
+
97
+ /* Mark Ruby objects within MemoryBlockObject. */
98
+ static void
99
+ MemoryBlockObject_dmark(void *self)
100
+ {
101
+ MemoryBlockObject *mblock = (MemoryBlockObject*)self;
102
+
103
+ rb_gc_mark(mblock->type);
104
+ }
105
+
106
+ static void
107
+ MemoryBlockObject_dfree(void *self)
108
+ {
109
+ MemoryBlockObject *mblock = (MemoryBlockObject*)self;
110
+
111
+ xnd_del(mblock->xnd);
112
+ mblock->xnd = NULL;
113
+ xfree(mblock);
114
+ }
115
+
116
+ static size_t
117
+ MemoryBlockObject_dsize(const void *self)
118
+ {
119
+ return sizeof(MemoryBlockObject);
120
+ }
121
+
122
+ static const rb_data_type_t MemoryBlockObject_type = {
123
+ .wrap_struct_name = "MemoryBlockObject",
124
+ .function = {
125
+ .dmark = MemoryBlockObject_dmark,
126
+ .dfree = MemoryBlockObject_dfree,
127
+ .dsize = MemoryBlockObject_dsize,
128
+ .reserved = {0,0},
129
+ },
130
+ .parent = 0,
131
+ .flags = RUBY_TYPED_FREE_IMMEDIATELY,
132
+ };
133
+
134
+ /* Allocate a MemoryBlockObject and return a pointer to allocated memory. */
135
+ static MemoryBlockObject *
136
+ mblock_alloc(void)
137
+ {
138
+ MemoryBlockObject *self;
139
+
140
+ self = ALLOC(MemoryBlockObject);
141
+ if (self == NULL) {
142
+ rb_raise(rb_eNoMemError, "failed to allocate MemoryBlockObject."); /* assumed intent: raise on allocation failure */
143
+ }
144
+
145
+ self->type = NULL;
146
+ self->xnd = NULL;
147
+
148
+ return self;
149
+ }
150
+
151
+ /* Allocate a MemoryBlockObject and wrap it in a Ruby object. */
152
+ static VALUE
153
+ mblock_allocate(void)
154
+ {
155
+ MemoryBlockObject *self = mblock_alloc();
156
+
157
+ return WRAP_MBLOCK(cRubyXND_MBlock, self);
158
+ }
159
+
160
+ /* Create empty mblock with no data. */
161
+ static VALUE
162
+ mblock_empty(VALUE type)
163
+ {
164
+ NDT_STATIC_CONTEXT(ctx);
165
+ MemoryBlockObject *mblock_p;
166
+
167
+ if (!rb_ndtypes_check_type(type)) {
168
+ rb_raise(rb_eArgError, "expected an NDT object to create mblock in mblock_empty.");
169
+ }
170
+
171
+ mblock_p = mblock_alloc();
172
+ mblock_p->xnd = xnd_empty_from_type(
173
+ rb_ndtypes_const_ndt(type),
174
+ XND_OWN_EMBEDDED, &ctx);
175
+ if (mblock_p->xnd == NULL) {
176
+ rb_raise(rb_eValueError, "cannot create mblock object from given type.");
177
+ }
178
+ mblock_p->type = type;
179
+
180
+ return WRAP_MBLOCK(cRubyXND_MBlock, mblock_p);
181
+ }
182
+
183
+ static VALUE
184
+ mblock_from_xnd(xnd_t *src)
185
+ {
186
+ NDT_STATIC_CONTEXT(ctx);
187
+ MemoryBlockObject *mblock_p;
188
+ VALUE type, mblock;
189
+ xnd_master_t *x;
190
+
191
+ x = xnd_from_xnd(src, XND_OWN_EMBEDDED, &ctx);
192
+ if (x == NULL) {
193
+ seterr(&ctx);
194
+ raise_error();
195
+ }
196
+
197
+ type = rb_ndtypes_from_type((ndt_t *)x->master.type);
198
+ mblock = mblock_allocate();
199
+
200
+ GET_MBLOCK(mblock, mblock_p);
201
+
202
+ mblock_p->type = type;
203
+ mblock_p->xnd = x;
204
+
205
+ return mblock;
206
+ }
207
+
208
+ static void
209
+ _strncpy(char *dest, const void *src, size_t len, size_t size)
210
+ {
211
+ assert (len <= size);
212
+ memcpy(dest, src, len);
213
+ memset(dest+len, '\0', size-len);
214
+ }
215
+
216
+ /* Return true if String is unicode. */
217
+ static int
218
+ string_is_unicode(VALUE str)
219
+ {
220
+ rb_encoding *enc = rb_enc_get(str); return enc == rb_utf8_encoding() || enc == rb_enc_find("UTF-16") || enc == rb_enc_find("UTF-32"); /* assumption: treat UTF-8/16/32 as unicode */
221
+ }
222
+
223
+ static int
224
+ string_is_ascii(VALUE str)
225
+ {
226
+ return RB_ENCODING_IS_ASCII8BIT(str);
227
+ }
228
+
229
+ /* FIXME: The functions below make Ruby function calls. Find a more efficient way. */
230
+
231
+ /* Return true if String is UTF-8 encoding. */
232
+ static int
233
+ string_is_u8(VALUE str)
234
+ {
235
+ return RTEST(
236
+ rb_funcall(
237
+ rb_funcall(str, rb_intern("encoding"), 0, NULL),
238
+ rb_intern("=="), 1,
239
+ rb_const_get(rb_cEncoding, rb_intern("UTF_8"))
240
+ )
241
+ );
242
+ }
243
+
244
+ /* Encode a string to enc. enc should be name of the constant in Encoding::*. */
245
+ static VALUE
246
+ string_encode(VALUE str, const char * enc)
247
+ {
248
+ return rb_funcall(str, rb_intern("encode"), 1,
249
+ rb_const_get(rb_cEncoding, rb_intern(enc)));
250
+ }
251
+
252
+ static int64_t
253
+ u8_skip_trailing_zero(const uint8_t *ptr, int64_t codepoints)
254
+ {
255
+ int64_t i;
256
+
257
+ for (i=codepoints-1; i >= 0; i--)
258
+ if (ptr[i] != 0)
259
+ return i+1;
260
+
261
+ return 0;
262
+ }
263
+
264
+ static int64_t
265
+ u16_skip_trailing_zero(const uint16_t *ptr, int64_t codepoints)
266
+ {
267
+ int64_t i;
268
+
269
+ for (i=codepoints-1; i >= 0; i--)
270
+ if (ptr[i] != 0)
271
+ return i+1;
272
+
273
+ return 0;
274
+ }
275
+
276
+ static int64_t
277
+ u32_skip_trailing_zero(const uint32_t *ptr, int64_t codepoints)
278
+ {
279
+ int64_t i;
280
+
281
+ for (i=codepoints-1; i >= 0; i--)
282
+ if (ptr[i] != 0)
283
+ return i+1;
284
+
285
+ return 0;
286
+ }
287
+
288
+ static int64_t
289
+ get_int(VALUE data, int64_t min, int64_t max)
290
+ {
291
+ int64_t x;
292
+
293
+ x = NUM2LL(data);
294
+ if (x < min || x > max) {
295
+ rb_raise(rb_eRangeError, "number out of range for the requested integer type.");
296
+ }
297
+
298
+ return x;
299
+ }
300
+
301
+ static uint64_t
302
+ get_uint(VALUE data, uint64_t max)
303
+ {
304
+ unsigned long long x = NUM2ULL(data);
305
+
306
+ if (x == (unsigned long long)-1) {
307
+ rb_raise(rb_eRangeError, "cannot assign a negative number to an unsigned type.");
308
+ }
309
+
310
+ if (x > max) {
311
+ rb_raise(rb_eRangeError, "number out of range: %llu", x);
312
+ }
313
+
314
+ return x;
315
+ }
316
+
317
+ /* Initialize an mblock object with data. */
318
+ static int
319
+ mblock_init(xnd_t * const x, VALUE data)
320
+ {
321
+ NDT_STATIC_CONTEXT(ctx);
322
+ const ndt_t * const t = x->type;
323
+
324
+ if (!check_invariants(t)) {
325
+ rb_raise(rb_eArgError, "type invariant check failed.");
326
+ }
327
+
328
+ if (ndt_is_abstract(t)) {
329
+ rb_raise(rb_eTypeError, "specified NDT has abstract type.");
330
+ }
331
+
332
+ /* set missing value. */
333
+ if (ndt_is_optional(t)) {
334
+ if (t->ndim > 0) {
335
+ rb_raise(rb_eNotImpError,
336
+ "optional dimensions are not implemented.");
337
+ }
338
+
339
+ if (data == Qnil) {
340
+ xnd_set_na(x);
341
+ return 0;
342
+ }
343
+
344
+ xnd_set_valid(x);
345
+ }
346
+
347
+ switch (t->tag) {
348
+ case FixedDim: {
349
+ const int64_t shape = t->FixedDim.shape;
350
+ int64_t i;
351
+
352
+ Check_Type(data, T_ARRAY);
353
+
354
+ if (RARRAY_LEN(data) != shape) {
355
+ rb_raise(rb_eArgError,
356
+ "Input length (%ld) and type length (%ld) mismatch.",
357
+ RARRAY_LEN(data), shape);
358
+ }
359
+
360
+ for (i = 0; i < shape; i++) {
361
+ xnd_t next = xnd_fixed_dim_next(x, i);
362
+ VALUE rb_index[1] = { LL2NUM(i) };
363
+
364
+ mblock_init(&next, rb_ary_aref(1, rb_index, data));
365
+ }
366
+ return 0;
367
+ }
368
+
369
+ case VarDim: {
370
+ int64_t start, step, shape;
371
+ int64_t i;
372
+
373
+ Check_Type(data, T_ARRAY);
374
+
375
+ shape = ndt_var_indices(&start, &step, t, x->index, &ctx);
376
+ if (shape < 0) {
377
+ seterr(&ctx);
378
+ raise_error();
379
+ }
380
+
381
+ if (RARRAY_LEN(data) != shape) {
382
+ rb_raise(rb_eValueError, "expected Array with size %ld not %ld.",
383
+ shape, RARRAY_LEN(data));
384
+ }
385
+
386
+ for (i = 0; i < shape; i++) {
387
+ xnd_t next = xnd_var_dim_next(x, start, step, i);
388
+ VALUE rb_index[1] = { LL2NUM(i) };
389
+
390
+ mblock_init(&next, rb_ary_aref(1, rb_index, data));
391
+ }
392
+
393
+ return 0;
394
+ }
395
+
396
+ case Tuple: {
397
+ const int64_t shape = t->Tuple.shape;
398
+ int64_t i;
399
+
400
+ /* Since Ruby does not have an immutable tuple type, we use an Array instead. */
401
+ Check_Type(data, T_ARRAY);
402
+
403
+ if (RARRAY_LEN(data) != shape) {
404
+ rb_raise(rb_eArgError,
405
+ "expected Array with size %ld, not %ld.",
406
+ shape, RARRAY_LEN(data));
407
+ }
408
+
409
+ for (i = 0; i < shape; i++) {
410
+ xnd_t next = xnd_tuple_next(x, i, &ctx);
411
+ if (next.ptr == NULL) {
412
+ seterr(&ctx);
413
+ raise_error();
414
+ }
415
+ VALUE rb_index[1] = { LL2NUM(i) };
416
+
417
+ mblock_init(&next, rb_ary_aref(1, rb_index, data));
418
+ }
419
+
420
+ return 0;
421
+ }
422
+
423
+ case Record: {
424
+ const int64_t shape = t->Record.shape;
425
+ VALUE temp;
426
+ int64_t i;
427
+
428
+ Check_Type(data, T_HASH);
429
+
430
+ if (rb_xnd_hash_size(data) != shape) {
431
+ rb_raise(rb_eArgError, "Hash size does not match the number of record fields.");
432
+ }
433
+
434
+ for (i = 0; i < shape; i++) {
435
+ xnd_t next = xnd_record_next(x, i, &ctx);
436
+ if (next.ptr == NULL) {
437
+ seterr(&ctx);
438
+ raise_error();
439
+ }
440
+
441
+ temp = rb_hash_aref(data, rb_str_new2(t->Record.names[i]));
442
+ mblock_init(&next, temp);
443
+ }
444
+
445
+ return 0;
446
+ }
447
+
448
+ case Ref: {
449
+ xnd_t next = xnd_ref_next(x, &ctx);
450
+ if (next.ptr == NULL) {
451
+ seterr(&ctx);
452
+ raise_error();
453
+ }
454
+
455
+ return mblock_init(&next, data);
456
+ }
457
+
458
+ case Constr: {
459
+ xnd_t next = xnd_constr_next(x, &ctx);
460
+ if (next.ptr == NULL) {
461
+ seterr(&ctx);
462
+ raise_error();
463
+ }
464
+
465
+ return mblock_init(&next, data);
466
+ }
467
+
468
+ case Nominal: {
469
+ xnd_t next = xnd_nominal_next(x, &ctx);
470
+ if (next.ptr == NULL) {
471
+ seterr(&ctx);
472
+ raise_error();
473
+ }
474
+
475
+ if (t->Nominal.meth->init != NULL) {
476
+ if (!t->Nominal.meth->init(&next, x, &ctx)) {
477
+ rb_raise(rb_eTypeError, "could not init Nominal type in mblock_init.");
478
+ }
479
+ return 0;
480
+ }
481
+
482
+ mblock_init(&next, data);
483
+
484
+ if (t->Nominal.meth->constraint != NULL &&
485
+ !t->Nominal.meth->constraint(&next, &ctx)) {
486
+ seterr(&ctx);
487
+ raise_error();
488
+ }
489
+
490
+ return 0;
491
+ }
492
+
493
+ case Bool: {
494
+ bool b;
495
+
496
+ if (data == Qnil) {
497
+ rb_raise(rb_eTypeError,
498
+ "assigning nil to memory block with non-optional type.");
499
+ }
500
+
501
+ if (RTEST(data)) {
502
+ if (FIXNUM_P(data) || RB_FLOAT_TYPE_P(data)) {
503
+ if (NUM2INT(data) == 0) {
504
+ b = 0;
505
+ } else { b = 1; } /* non-zero numbers are truthy */
506
+ }
507
+ else {
508
+ b = 1;
509
+ }
510
+ }
511
+ else {
512
+ b = 0;
513
+ }
514
+
515
+ PACK_SINGLE(x->ptr, b, bool, t->flags);
516
+ return 0;
517
+ }
518
+
519
+ case Int8: {
520
+ int8_t temp = (int8_t)get_int(data, INT8_MIN, INT8_MAX);
521
+
522
+ PACK_SINGLE(x->ptr, temp, int8_t, t->flags);
523
+ return 0;
524
+ }
525
+
526
+ case Int16: {
527
+ int16_t temp = (int16_t)get_int(data, INT16_MIN, INT16_MAX);
528
+
529
+ PACK_SINGLE(x->ptr, temp, int16_t, t->flags);
530
+ return 0;
531
+ }
532
+
533
+ case Int32: {
534
+ int32_t temp = (int32_t)get_int(data, INT32_MIN, INT32_MAX);
535
+
536
+ PACK_SINGLE(x->ptr, temp, int32_t, t->flags);
537
+ return 0;
538
+ }
539
+
540
+ case Int64: {
541
+ int64_t temp = get_int(data, INT64_MIN, INT64_MAX);
542
+
543
+ PACK_SINGLE(x->ptr, temp, int64_t, t->flags);
544
+ return 0;
545
+ }
546
+
547
+ case Uint8: {
548
+ uint8_t temp = (uint8_t)get_uint(data, UINT8_MAX);
549
+ PACK_SINGLE(x->ptr, temp, uint8_t, t->flags);
550
+ return 0;
551
+ }
552
+
553
+ case Uint16: {
554
+ uint16_t temp = (uint16_t)get_uint(data, UINT16_MAX);
555
+
556
+ PACK_SINGLE(x->ptr, temp, uint16_t, t->flags);
557
+ return 0;
558
+ }
559
+
560
+ case Uint32: {
561
+ uint32_t temp = (uint32_t)get_uint(data, UINT32_MAX);
562
+
563
+ PACK_SINGLE(x->ptr, temp, uint32_t, t->flags);
564
+ return 0;
565
+ }
566
+
567
+ case Uint64: {
568
+ uint64_t temp = get_uint(data, UINT64_MAX);
569
+
570
+ PACK_SINGLE(x->ptr, temp, uint64_t, t->flags);
571
+ return 0;
572
+ }
573
+
574
+ case Float16: {
575
+ rb_raise(rb_eNotImpError, "float16 not implemented.");
576
+ }
577
+
578
+ case Float32: {
579
+ double temp = NUM2DBL(data);
580
+ return rb_xnd_pack_float32(temp, (unsigned char*)x->ptr, le(t->flags));
581
+ }
582
+
583
+ case Float64: {
584
+ double temp = NUM2DBL(data);
585
+ return rb_xnd_pack_float64(temp, (unsigned char*)x->ptr, le(t->flags));
586
+ }
587
+
588
+ case Complex32: {
589
+ rb_raise(rb_eNotImpError, "complex32 not implemented.");
590
+ }
591
+
592
+ case Complex64: {
593
+ double real, imag;
594
+
595
+ rb_xnd_get_complex_values(data, &real, &imag);
596
+
597
+ rb_xnd_pack_float32(real, (unsigned char*)x->ptr, le(t->flags));
598
+ rb_xnd_pack_float32(imag, (unsigned char*)x->ptr + 4, le(t->flags));
599
+
600
+ return 0;
601
+ }
602
+
603
+ case Complex128: {
604
+ double real, imag;
605
+
606
+ rb_xnd_get_complex_values(data, &real, &imag);
607
+
608
+ rb_xnd_pack_float64(real, (unsigned char*)x->ptr, le(t->flags));
609
+ rb_xnd_pack_float64(imag, (unsigned char*)x->ptr + 8, le(t->flags));
610
+
611
+ return 0;
612
+ }
613
+
614
+ case FixedString: {
615
+ int64_t codepoints = t->FixedString.size;
616
+ int64_t len;
617
+
618
+ Check_Type(data, T_STRING);
619
+
620
+ /* FIXME: check for unicode string. */
621
+
622
+ switch (t->FixedString.encoding) {
623
+ case Ascii: {
624
+ if (!string_is_ascii(data)) {
625
+ rb_raise(rb_eValueError, "string must be ascii");
626
+ }
627
+
628
+ len = RSTRING_LEN(data);
629
+ if (len > t->datasize) {
630
+ rb_raise(rb_eValueError,
631
+ "maximum string size in bytes is %" PRIi64, codepoints);
632
+ }
633
+
634
+ _strncpy(x->ptr, StringValuePtr(data), (size_t)len, (size_t)t->datasize);
635
+ return 0;
636
+ }
637
+
638
+ case Utf8: {
639
+ if (!string_is_u8(data)) {
640
+ rb_raise(rb_eValueError, "string must be utf-8");
641
+ }
642
+
643
+ len = RSTRING_LEN(data);
644
+ if (len > t->datasize) {
645
+ rb_raise(rb_eValueError,
646
+ "maximum string size (in UTF-8 code points) is %" PRIi64, codepoints);
647
+ }
648
+
649
+ _strncpy(x->ptr, StringValuePtr(data), (size_t)len, (size_t)t->datasize);
650
+ return 0;
651
+ }
652
+
653
+ case Utf16: {
654
+ rb_encoding *utf16 = rb_enc_find("UTF-16");
655
+ VALUE b = rb_str_export_to_enc(data, utf16);
656
+
657
+ len = RSTRING_LEN(b);
658
+ if (len-2 > t->datasize) {
659
+ rb_raise(rb_eValueError,
660
+ "maximum string size (in UTF-16 code points) is %" PRIi64, codepoints);
661
+ }
662
+
663
+ #ifdef XND_DEBUG
664
+ /* skip byte order mark. */
665
+ assert(len >= 2);
666
+ #endif
667
+ _strncpy(x->ptr, StringValuePtr(b) + 2, (size_t)(len-2), (size_t)t->datasize);
668
+ return 0;
669
+ }
670
+
671
+ case Utf32: {
672
+ VALUE b = rb_str_export_to_enc(data, rb_enc_find("UTF-32"));
673
+
674
+ len = RSTRING_LEN(b);
675
+ if (len-4 > t->datasize) {
676
+ rb_raise(rb_eValueError,
677
+ "maximum string size (in UTF-32 code points) is %" PRIi64, codepoints);
678
+ }
679
+
680
+ #ifdef XND_DEBUG
681
+ /* skip byte order mark. */
682
+ assert(len >= 4);
683
+ #endif
684
+ _strncpy(x->ptr, StringValuePtr(b)+4, (size_t)(len-4), (size_t)t->datasize);
685
+ return 0;
686
+ }
687
+
688
+ case Ucs2: {
689
+ rb_raise(rb_eNotImpError, "UCS2 encoding not implemented.");
690
+ }
691
+
692
+ default: {
693
+ rb_raise(rb_eRuntimeError, "invalid string encoding.");
694
+ }
695
+ }
696
+ }
697
+
698
+ case FixedBytes: {
699
+ int64_t size = t->FixedBytes.size;
700
+ int64_t len;
701
+
702
+ Check_Type(data, T_STRING);
703
+
704
+ if (!string_is_ascii(data)) {
705
+ rb_raise(rb_eTypeError, "String must be ASCII encoded for FixedBytes.");
706
+ }
707
+
708
+ len = RSTRING_LEN(data);
709
+
710
+ if (len > size) {
711
+ rb_raise(rb_eValueError, "maximum number of bytes in string is %" PRIi64, size);
712
+ }
713
+
714
+ _strncpy(x->ptr, StringValuePtr(data), (size_t)len, (size_t)size);
715
+
716
+ return 0;
717
+ }
718
+
719
+ case String: {
720
+ size_t size;
721
+ const char *cp;
722
+ char *s;
723
+
724
+ Check_Type(data, T_STRING);
725
+
726
+ cp = StringValuePtr(data);
727
+ size = RSTRING_LEN(data);
728
+ s = ndt_strdup(cp, &ctx);
729
+ if (s == NULL) {
730
+ seterr(&ctx);
731
+ raise_error();
732
+ }
733
+
734
+ if (XND_POINTER_DATA(x->ptr)) {
735
+ ndt_free(XND_POINTER_DATA(x->ptr));
736
+ }
737
+
738
+ XND_POINTER_DATA(x->ptr) = s;
739
+ return 0;
740
+ }
741
+
742
+ case Bytes: {
743
+ size_t size;
744
+ char *cp, *s;
745
+
746
+ Check_Type(data, T_STRING);
747
+
748
+ size = RSTRING_LEN(data);
749
+ cp = StringValuePtr(data);
750
+
751
+ s = ndt_aligned_calloc(t->Bytes.target_align, size);
752
+ if (s == NULL) {
753
+ rb_raise(rb_eNoMemError, "no memory for allocating bytes.");
754
+ }
755
+
756
+ memcpy(s, cp, size);
757
+
758
+ if (XND_BYTES_DATA(x->ptr)) {
759
+ ndt_aligned_free(XND_BYTES_DATA(x->ptr));
760
+ }
761
+
762
+ XND_BYTES_SIZE(x->ptr) = size;
763
+ XND_BYTES_DATA(x->ptr) = (uint8_t *)s;
764
+
765
+ return 0;
766
+ }
767
+
768
+ case Categorical: {
769
+ int64_t k;
770
+
771
+ if (RB_TYPE_P(data, T_TRUE) || RB_TYPE_P(data, T_FALSE)) {
772
+ int temp = RTEST(data);
773
+
774
+ for (k = 0; k < t->Categorical.ntypes; k++) {
775
+ if (t->Categorical.types[k].tag == ValBool &&
776
+ temp == t->Categorical.types[k].ValBool) {
777
+ PACK_SINGLE(x->ptr, k, int64_t, t->flags);
778
+ return 0;
779
+ }
780
+ }
781
+ }
782
+ else if (RB_TYPE_P(data, T_FIXNUM)) {
783
+ int64_t temp = get_int(data, INT64_MIN, INT64_MAX);
784
+
785
+ for (k = 0; k < t->Categorical.ntypes; k++) {
786
+ if (t->Categorical.types[k].tag == ValInt64 &&
787
+ temp == t->Categorical.types[k].ValInt64) {
788
+ PACK_SINGLE(x->ptr, k, int64_t, t->flags);
789
+ return 0;
790
+ }
791
+ }
792
+ }
793
+ else if (RB_TYPE_P(data, T_FLOAT)) {
794
+ double temp = NUM2DBL(data);
795
+
796
+ for (k = 0; k < t->Categorical.ntypes; k++) {
797
+ if (t->Categorical.types[k].tag == ValFloat64 &&
798
+ temp == t->Categorical.types[k].ValFloat64) {
799
+ PACK_SINGLE(x->ptr, k, int64_t, t->flags);
800
+ return 0;
801
+ }
802
+ }
803
+ }
804
+ else if (RB_TYPE_P(data, T_STRING)) {
805
+ const char *temp = StringValuePtr(data);
806
+
807
+ for (k = 0; k < t->Categorical.ntypes; k++) {
808
+ if (t->Categorical.types[k].tag == ValString &&
809
+ strcmp(temp, t->Categorical.types[k].ValString) == 0) {
810
+ PACK_SINGLE(x->ptr, k, int64_t, t->flags);
811
+ return 0;
812
+ }
813
+ }
814
+ }
815
+
816
+ for (k = 0; k < t->Categorical.ntypes; k++) {
817
+ if (t->Categorical.types[k].tag == ValNA) {
818
+ PACK_SINGLE(x->ptr, k, int64_t, t->flags);
819
+ return 0;
820
+ }
821
+ }
822
+
823
+ rb_raise(rb_eValueError, "category not found.");
824
+ }
825
+
826
+ case Char: {
827
+ rb_raise(rb_eNotImpError, "'Char' type semantics need to be defined.");
828
+ }
829
+
830
+ case Module: {
831
+ rb_raise(rb_eNotImpError, "'Module' type not implemented.");
832
+ }
833
+
834
+ /* NOT REACHED: intercepted by ndt_is_abstract(). */
835
+ case AnyKind: case SymbolicDim: case EllipsisDim: case Typevar:
836
+ case ScalarKind: case SignedKind: case UnsignedKind: case FloatKind:
837
+ case ComplexKind: case FixedStringKind: case FixedBytesKind:
838
+ case Function:
839
+ rb_raise(rb_eArgError, "unexpected abstract type.");
840
+ }
841
+ }
842
+
843
+ /* Create mblock from NDT type.
844
+ *
845
+ * @param type - NDT Ruby object.
846
+ * @param data - Data as a Ruby object.
847
+ */
848
+ static VALUE
849
+ mblock_from_typed_value(VALUE type, VALUE data)
850
+ {
851
+ VALUE mblock;
852
+ MemoryBlockObject *mblock_p;
853
+
854
+ mblock = mblock_empty(type);
855
+ GET_MBLOCK(mblock, mblock_p);
856
+ mblock_init(&mblock_p->xnd->master, data);
857
+
858
+ return mblock;
859
+ }
860
+
861
+ /****************************************************************************/
862
+ /* xnd object */
863
+ /****************************************************************************/
864
+
865
+ typedef struct XndObject {
866
+ VALUE mblock; /* owner of the primary type and memory block */
867
+ VALUE type; /* owner of the current type. lives and dies with this obj. */
868
+ xnd_t xnd; /* typed view, does not own anything */
869
+ } XndObject;
870
+
871
+ #define XND(xnd_p) (&(((XndObject *)xnd_p)->xnd))
872
+ #define XND_CHECK_TYPE(xnd) (CLASS_OF(xnd) == cXND)
873
+ #define GET_XND(obj, xnd_p) do { \
874
+ TypedData_Get_Struct((obj), XndObject, \
875
+ &XndObject_type, (xnd_p)); \
876
+ } while (0)
877
+ #define MAKE_XND(klass, xnd_p) TypedData_Make_Struct(klass, XndObject, \
878
+ &XndObject_type, xnd_p)
879
+ #define WRAP_XND(klass, xnd_p) TypedData_Wrap_Struct(klass, &XndObject_type, xnd_p)
880
+
881
+ static VALUE XND_size(VALUE self);
882
+
883
+ /* Allocate an XndObject and return wrapped in a Ruby object. */
884
+ static VALUE
885
+ XndObject_alloc(void)
886
+ {
887
+ XndObject *xnd;
888
+
889
+ xnd = ZALLOC(XndObject);
890
+
891
+ xnd->mblock = 0;
892
+ xnd->type = 0;
893
+ xnd->xnd.bitmap.data = NULL;
894
+ xnd->xnd.bitmap.size = 0;
895
+ xnd->xnd.bitmap.next = NULL;
896
+ xnd->xnd.index = 0;
897
+ xnd->xnd.type = NULL;
898
+ xnd->xnd.ptr = NULL;
899
+
900
+ return WRAP_XND(cXND, xnd);
901
+ }
902
+
903
+ /* Mark Ruby objects within XndObject. */
904
+ static void
905
+ XndObject_dmark(void *self)
906
+ {
907
+ XndObject *xnd = (XndObject*)self;
908
+
909
+ rb_gc_mark(xnd->type);
910
+ rb_gc_mark(xnd->mblock);
911
+ }
912
+
913
+ static void
914
+ XndObject_dfree(void *self)
915
+ {
916
+ XndObject *xnd = (XndObject*)self;
917
+
918
+ rb_xnd_gc_guard_unregister(xnd);
919
+ xfree(xnd);
920
+ }
921
+
922
+ static size_t
923
+ XndObject_dsize(const void *self)
924
+ {
925
+ return sizeof(XndObject);
926
+ }
927
+
928
+ static const rb_data_type_t XndObject_type = {
929
+ .wrap_struct_name = "XndObject",
930
+ .function = {
931
+ .dmark = XndObject_dmark,
932
+ .dfree = XndObject_dfree,
933
+ .dsize = XndObject_dsize,
934
+ .reserved = {0,0},
935
+ },
936
+ .parent = 0,
937
+ .flags = RUBY_TYPED_FREE_IMMEDIATELY,
938
+ };
939
+
940
+ static void
941
+ XND_from_mblock(XndObject *xnd_p, VALUE mblock)
942
+ {
943
+ MemoryBlockObject *mblock_p;
944
+
945
+ GET_MBLOCK(mblock, mblock_p);
946
+
947
+ xnd_p->mblock = mblock;
948
+ xnd_p->type = mblock_p->type;
949
+ xnd_p->xnd = mblock_p->xnd->master;
950
+ }
951
+
952
+ /* Allocator for RubyXND object. Called by Ruby before initialize. */
953
+ static VALUE
954
+ RubyXND_allocate(VALUE klass)
955
+ {
956
+ XndObject *xnd;
957
+
958
+ xnd = ZALLOC(XndObject);
959
+
960
+ xnd->mblock = 0;
961
+ xnd->type = 0;
962
+ xnd->xnd.bitmap.data = NULL;
963
+ xnd->xnd.bitmap.size = 0;
964
+ xnd->xnd.bitmap.next = NULL;
965
+ xnd->xnd.index = 0;
966
+ xnd->xnd.type = NULL;
967
+ xnd->xnd.ptr = NULL;
968
+
969
+ return WRAP_XND(klass, xnd);
970
+ }
971
+
972
+ /* Initialize a RubyXND object. */
973
+ static VALUE
974
+ RubyXND_initialize(VALUE self, VALUE type, VALUE data)
975
+ {
976
+ VALUE mblock;
977
+ XndObject *xnd_p;
978
+
979
+ mblock = mblock_from_typed_value(type, data);
980
+ GET_XND(self, xnd_p);
981
+
982
+ XND_from_mblock(xnd_p, mblock);
983
+ rb_xnd_gc_guard_register(xnd_p, mblock);
984
+
985
+ #ifdef XND_DEBUG
986
+ assert(XND(xnd_p)->type);
987
+ assert(XND(xnd_p)->ptr);
988
+ #endif
989
+
990
+ return self;
991
+ }
992
+
993
+ static size_t
994
+ XND_get_size(VALUE xnd)
995
+ {
996
+ size_t size;
997
+ int state;
998
+ VALUE rb_size;
999
+
1000
+ rb_size = rb_protect(XND_size, xnd, &state);
1001
+ size = state ? 0 : NUM2LL(rb_size);
1002
+
1003
+ return size;
1004
+ }
1005
+
1006
+ /*************************** object properties ********************************/
1007
+
1008
+ /* Return the ndtypes object of this xnd object. */
1009
+ static VALUE
1010
+ XND_type(VALUE self)
1011
+ {
1012
+ XndObject *xnd_p;
1013
+
1014
+ GET_XND(self, xnd_p);
1015
+
1016
+ return xnd_p->type;
1017
+ }
1018
+
1019
+ static VALUE
1020
+ _XND_value(const xnd_t * const x, const int64_t maxshape)
1021
+ {
1022
+ NDT_STATIC_CONTEXT(ctx);
1023
+ const ndt_t * const t = x->type;
1024
+
1025
+ #ifdef XND_DEBUG
1026
+ assert(t);
1027
+ assert(x);
1028
+ #endif
1029
+
1030
+ if (!ndt_is_concrete(t)) {
1031
+ rb_raise(rb_eTypeError, "type must be concrete to return a value.");
1032
+ }
1033
+
1034
+ /* bitmap access needs linear index. */
1035
+ if (xnd_is_na(x)) {
1036
+ return Qnil;
1037
+ }
1038
+
1039
+ switch (t->tag) {
1040
+ case FixedDim: {
1041
+ VALUE array, v;
1042
+ int64_t shape, i;
1043
+
1044
+ shape = t->FixedDim.shape;
1045
+ if (shape > maxshape) {
1046
+ shape = maxshape;
1047
+ }
1048
+
1049
+ array = array_new(shape);
1050
+
1051
+ for (i = 0; i < shape; i++) {
1052
+ if (i == maxshape-1) {
1053
+ rb_ary_store(array, i, xnd_ellipsis());
1054
+ break;
1055
+ }
1056
+
1057
+ const xnd_t next = xnd_fixed_dim_next(x, i);
1058
+ v = _XND_value(&next, maxshape);
1059
+ rb_ary_store(array, i, v);
1060
+ }
1061
+
1062
+ return array;
1063
+ }
1064
+
1065
+ case VarDim: {
1066
+ VALUE array, v;
1067
+ int64_t start, step, shape;
1068
+ int64_t i;
1069
+
1070
+ shape = ndt_var_indices(&start, &step, t, x->index, &ctx);
1071
+ if (shape < 0) {
1072
+ seterr(&ctx);
1073
+ raise_error();
1074
+ }
1075
+
1076
+ array = array_new(shape);
1077
+
1078
+ for (i = 0; i < shape; i++) {
1079
+ if (i == maxshape-1) {
1080
+ rb_ary_store(array, i, xnd_ellipsis());
1081
+ break;
1082
+ }
1083
+
1084
+ xnd_t next = xnd_var_dim_next(x, start, step, i);
1085
+ v = _XND_value(&next, maxshape);
1086
+ rb_ary_store(array, i, v);
1087
+ }
1088
+
1089
+ return array;
1090
+ }
1091
+
1092
+ case Tuple: {
1093
+ VALUE tuple, v;
1094
+ int64_t shape, i;
1095
+
1096
+ shape = t->Tuple.shape;
1097
+ if (shape > maxshape) {
1098
+ shape = maxshape;
1099
+ }
1100
+
1101
+ tuple = array_new(shape);
1102
+
1103
+ for (i = 0; i < shape; i++) {
1104
+ if (i == maxshape-1) {
1105
+ rb_ary_store(tuple, i, xnd_ellipsis());
1106
+ break;
1107
+ }
1108
+
1109
+ const xnd_t next = xnd_tuple_next(x, i, &ctx);
1110
+ if (next.ptr == NULL) {
1111
+ seterr(&ctx);
1112
+ raise_error();
1113
+ }
1114
+
1115
+ v = _XND_value(&next, maxshape);
1116
+ rb_ary_store(tuple, i, v);
1117
+ }
1118
+
1119
+ return tuple;
1120
+ }
1121
+
1122
+ case Record: {
1123
+ VALUE hash, v;
1124
+ int64_t shape, i;
1125
+
1126
+ shape = t->Record.shape;
1127
+ if (shape > maxshape) {
1128
+ shape = maxshape;
1129
+ }
1130
+
1131
+ hash = rb_hash_new();
1132
+
1133
+ for (i = 0; i < shape; ++i) {
1134
+ if (i == maxshape - 1) {
1135
+ rb_hash_aset(hash, xnd_ellipsis(), xnd_ellipsis());
1136
+ break;
1137
+ }
1138
+
1139
+ xnd_t next = xnd_record_next(x, i, &ctx);
1140
+ if (next.ptr == NULL) {
1141
+ seterr(&ctx);
1142
+ raise_error();
1143
+ }
1144
+
1145
+ v = _XND_value(&next, maxshape);
1146
+ rb_hash_aset(hash, rb_str_new2(t->Record.names[i]), v);
1147
+ }
1148
+
1149
+ return hash;
1150
+ }
1151
+
1152
+ case Ref: {
1153
+ xnd_t next = xnd_ref_next(x, &ctx);
1154
+ if (next.ptr == NULL) {
1155
+ seterr(&ctx);
1156
+ raise_error();
1157
+ }
1158
+
1159
+ return _XND_value(&next, maxshape);
1160
+ }
1161
+
1162
+ case Constr: {
1163
+ xnd_t next = xnd_constr_next(x, &ctx);
1164
+ if (next.ptr == NULL) {
1165
+ seterr(&ctx);
1166
+ raise_error();
1167
+ }
1168
+
1169
+ return _XND_value(&next, maxshape);
1170
+ }
1171
+
1172
+ case Nominal: {
1173
+ xnd_t next = xnd_nominal_next(x, &ctx);
1174
+ if (next.ptr == NULL) {
1175
+ seterr(&ctx);
1176
+ raise_error();
1177
+ }
1178
+
1179
+ if (t->Nominal.meth->repr != NULL) {
1180
+ return t->Nominal.meth->repr(&next, &ctx);
1181
+ }
1182
+
1183
+ return _XND_value(&next, maxshape);
1184
+ }
1185
+
1186
+ case Bool: {
1187
+ bool temp;
1188
+ UNPACK_SINGLE(temp, x->ptr, bool, t->flags);
1189
+ return INT2BOOL(temp);
1190
+ }
1191
+
1192
+ case Int8: {
1193
+ int8_t temp;
1194
+ UNPACK_SINGLE(temp, x->ptr, int8_t, t->flags);
1195
+ return INT2NUM(temp);
1196
+ }
1197
+
1198
+ case Int16: {
1199
+ int16_t temp;
1200
+ UNPACK_SINGLE(temp, x->ptr, int16_t, t->flags);
1201
+ return INT2NUM(temp);
1202
+ }
1203
+
1204
+ case Int32: {
1205
+ int32_t temp;
1206
+ UNPACK_SINGLE(temp, x->ptr, int32_t, t->flags);
1207
+ return INT2NUM(temp);
1208
+ }
1209
+
1210
+ case Int64: {
1211
+ int64_t temp;
1212
+ UNPACK_SINGLE(temp, x->ptr, int64_t, t->flags);
1213
+ return LL2NUM(temp);
1214
+ }
1215
+
1216
+ case Uint8: {
1217
+ uint8_t temp;
1218
+ UNPACK_SINGLE(temp, x->ptr, uint8_t, t->flags);
1219
+ return UINT2NUM(temp);
1220
+ }
1221
+
1222
+ case Uint16: {
1223
+ uint16_t temp;
1224
+ UNPACK_SINGLE(temp, x->ptr, uint16_t, t->flags);
1225
+ return UINT2NUM(temp);
1226
+ }
1227
+
1228
+ case Uint32: {
1229
+ uint32_t temp;
1230
+ UNPACK_SINGLE(temp, x->ptr, uint32_t, t->flags);
1231
+ return ULL2NUM(temp);
1232
+ }
1233
+
1234
+ case Uint64: {
1235
+ uint64_t temp;
1236
+ UNPACK_SINGLE(temp, x->ptr, uint64_t, t->flags);
1237
+ return ULL2NUM(temp);
1238
+ }
1239
+
1240
+ case Float16: {
1241
+ rb_raise(rb_eNotImpError, "float16 is not implemented.");
1242
+ }
1243
+
1244
+ case Float32: {
1245
+ float temp = 0.0;
1246
+
1247
+ rb_xnd_unpack_float32(&temp, (unsigned char*)x->ptr, le(t->flags));
1248
+ return DBL2NUM(temp);
1249
+ }
1250
+
1251
+ case Float64: {
1252
+ double temp = 0.0;
1253
+
1254
+ rb_xnd_unpack_float64(&temp, (unsigned char*)x->ptr, le(t->flags));
1255
+ return DBL2NUM(temp);
1256
+ }
1257
+
1258
+ case Complex32: {
1259
+ rb_raise(rb_eNotImpError, "complex32 not implemented.");
1260
+ }
1261
+
1262
+ case Complex64: {
1263
+ float real = 0.0, imag = 0.0;
1264
+
1265
+ rb_xnd_unpack_float32(&real, (unsigned char*)x->ptr, le(t->flags));
1266
+ rb_xnd_unpack_float32(&imag, (unsigned char*)x->ptr+4, le(t->flags));
1267
+
1268
+ return rb_complex_new(DBL2NUM(real), DBL2NUM(imag));
1269
+ }
1270
+
1271
+ case Complex128: {
1272
+ double real = 0.0, imag = 0.0;
1273
+
1274
+ rb_xnd_unpack_float64(&real, (unsigned char*)x->ptr, le(t->flags));
1275
+ rb_xnd_unpack_float64(&imag, (unsigned char*)x->ptr+8, le(t->flags));
1276
+
1277
+ return rb_complex_new(DBL2NUM(real), DBL2NUM(imag));
1278
+ }
1279
+
1280
+ case FixedString: {
1281
+ int64_t codepoints = t->FixedString.size;
1282
+
1283
+ switch (t->FixedString.encoding) {
1284
+ case Ascii: {
1285
+ codepoints = u8_skip_trailing_zero((uint8_t *)x->ptr, codepoints);
1286
+ return rb_usascii_str_new(x->ptr, codepoints);
1287
+ }
1288
+
1289
+ case Utf8: {
1290
+ codepoints = u8_skip_trailing_zero((uint8_t *)x->ptr, codepoints);
1291
+ return rb_utf8_str_new(x->ptr, codepoints);
1292
+ }
1293
+
1294
+ case Utf16: {
1295
+ rb_encoding *utf16 = rb_enc_find("UTF-16");
1296
+ codepoints = u16_skip_trailing_zero((uint16_t *)x->ptr, codepoints);
1297
+
1298
+ return rb_enc_str_new(x->ptr, codepoints*2, utf16);
1299
+ }
1300
+
1301
+ case Utf32: {
1302
+ rb_encoding *utf32 = rb_enc_find("UTF-32");
1303
+ codepoints = u32_skip_trailing_zero((uint32_t *)x->ptr, codepoints);
1304
+
1305
+ return rb_enc_str_new(x->ptr, codepoints*4, utf32);
1306
+ }
1307
+
1308
+ case Ucs2: {
1309
+ rb_raise(rb_eNotImpError, "UCS2 encoding not implemented.");
1310
+ }
1311
+
1312
+ default: {
1313
+ rb_raise(rb_eRuntimeError, "invalid string encoding.");
1314
+ }
1315
+ }
1316
+ }
1317
+
1318
+ case FixedBytes: {
1319
+ return bytes_from_string_and_size(x->ptr, t->FixedBytes.size);
1320
+ }
1321
+
1322
+ case String: {
1323
+ const char *s = XND_POINTER_DATA(x->ptr);
1324
+ size_t size = s ? strlen(s) : 0;
1325
+
1326
+ return rb_utf8_str_new(s, size);
1327
+ }
1328
+
1329
+ case Bytes: {
1330
+ char *s = (char *)XND_BYTES_DATA(x->ptr);
1331
+ size_t size = s ? (size_t)XND_BYTES_SIZE(x->ptr) : 0; /* use the stored size: bytes may contain embedded NULs */
1332
+
1333
+ return bytes_from_string_and_size(s, size);
1334
+ }
1335
+
1336
+ case Categorical: {
1337
+ int64_t k;
1338
+
1339
+ UNPACK_SINGLE(k, x->ptr, int64_t, t->flags);
1340
+
1341
+ switch(t->Categorical.types[k].tag) {
1342
+ case ValBool: {
1343
+ bool temp = t->Categorical.types[k].ValBool;
1344
+ return INT2BOOL(temp);
1345
+ }
1346
+
1347
+ case ValInt64: {
1348
+ int64_t temp = t->Categorical.types[k].ValInt64;
1349
+ return LL2NUM(temp);
1350
+ }
1351
+
1352
+ case ValFloat64: {
1353
+ double temp = t->Categorical.types[k].ValFloat64;
1354
+ return DBL2NUM(temp);
1355
+ }
1356
+
1357
+ case ValString: {
1358
+ const char *temp = t->Categorical.types[k].ValString;
1359
+ return rb_str_new2(temp);
1360
+ }
1361
+
1362
+ case ValNA: {
1363
+ return Qnil;
1364
+ }
1365
+
1366
+ default: {
1367
+ rb_raise(rb_eRuntimeError, "unexpected category tag.");
1368
+ }
1369
+ }
1370
+ }
1371
+
1372
+ case Char: {
1373
+ rb_raise(rb_eNotImpError, "char semantics need to be defined.");
1374
+ }
1375
+
1376
+ case Module: {
1377
+ rb_raise(rb_eNotImpError, "'Module' type not implemented yet.");
1378
+ }
1379
+
1380
+ /* NOT REACHED: intercepted by ndt_is_abstract(). */
1381
+ case AnyKind: case SymbolicDim: case EllipsisDim: case Typevar:
1382
+ case ScalarKind: case SignedKind: case UnsignedKind: case FloatKind:
1383
+ case ComplexKind: case FixedStringKind: case FixedBytesKind:
1384
+ case Function:
1385
+ rb_raise(rb_eArgError, "unexpected abstract type.");
1386
+ }
1387
+
1388
+ rb_raise(rb_eRuntimeError, "invalid type tag %d.", t->tag);
1389
+ }
1390
+
1391
+ /* Return the value of this xnd object. Aliased to to_a. */
1392
+ static VALUE
1393
+ XND_value(VALUE self)
1394
+ {
1395
+ XndObject *xnd_p;
1396
+
1397
+ GET_XND(self, xnd_p);
1398
+
1399
+ return _XND_value(XND(xnd_p), INT64_MAX);
1400
+ }
1401
+
1402
+ /*************************** slicing functions ********************************/
1403
+
1404
+ #define KEY_INDEX 1
1405
+ #define KEY_FIELD 2
1406
+ #define KEY_SLICE 4
1407
+ #define KEY_ERROR 128
1408
+
1409
+ /*
1410
+ @param src_p Pointer to the source XND object from which view is being created.
1411
+ @param x Metadata for creating the view.
1412
+ */
1413
+ static VALUE
1414
+ RubyXND_view_move_type(XndObject *src_p, xnd_t *x)
1415
+ {
1416
+ XndObject *view_p;
1417
+ VALUE type, view;
1418
+
1419
+ type = rb_ndtypes_move_subtree(src_p->type, (ndt_t *)x->type);
1420
+ view = XndObject_alloc();
1421
+ GET_XND(view, view_p);
1422
+
1423
+ view_p->mblock = src_p->mblock;
1424
+ view_p->type = type;
1425
+ view_p->xnd = *x;
1426
+
1427
+ rb_xnd_gc_guard_register(view_p, view_p->mblock);
1428
+
1429
+ return view;
1430
+ }
1431
+
1432
+ /* Convert a single Ruby object index into a form that XND can understand.
1433
+
1434
+ @param *key
1435
+ @param obj
1436
+ @param size Size of object. 1 for all scalars and the actual size otherwise.
1437
+ */
1438
+ static uint8_t
1439
+ convert_single(xnd_index_t *key, VALUE obj, size_t size)
1440
+ {
1441
+ if (RB_TYPE_P(obj, T_FIXNUM)) {
1442
+ int64_t i = NUM2LL(obj);
1443
+
1444
+ key->tag = Index;
1445
+ key->Index = i;
1446
+
1447
+ return KEY_INDEX;
1448
+ }
1449
+ else if (RB_TYPE_P(obj, T_STRING)) {
1450
+ const char *s = StringValuePtr(obj);
1451
+
1452
+ key->tag = FieldName;
1453
+ key->FieldName = s;
1454
+
1455
+ return KEY_FIELD;
1456
+ }
1457
+ else if (CLASS_OF(obj) == rb_cRange) {
1458
+ if (size == 0) {
1459
+ rb_raise(rb_eIndexError, "Cannot use Range on this type.");
1460
+ };
1461
+
1462
+ size_t begin, end, step;
1463
+
1464
+ rb_range_unpack(obj, &begin, &end, &step, size);
1465
+ key->tag = Slice;
1466
+ key->Slice.start = begin;
1467
+ key->Slice.stop = end;
1468
+ key->Slice.step = step;
1469
+
1470
+ return KEY_SLICE;
1471
+ }
1472
+ // case of INF (infinite range syntax sugar)
1473
+ else if (RB_TYPE_P(obj, T_FLOAT)) {
1474
+ double value = RFLOAT_VALUE(obj);
1475
+
1476
+ if (isinf(value)) {
1477
+ key->tag = Slice;
1478
+ key->Slice.start = 0;
1479
+ key->Slice.stop = INT64_MAX;
1480
+ key->Slice.step = 1;
1481
+
1482
+ return KEY_SLICE;
1483
+ }
1484
+ else {
1485
+ rb_raise(rb_eArgError, "wrong object specified in index.");
1486
+ }
1487
+ }
1488
+ else {
1489
+ rb_raise(rb_eArgError, "wrong object specified in index.");
1490
+ }
1491
+ }
1492
+
1493
+ static uint8_t
1494
+ convert_key(xnd_index_t *indices, int *len, int argc, VALUE *argv, size_t size)
1495
+ {
1496
+ uint8_t flags = 0;
1497
+ VALUE x;
1498
+
1499
+ if (argc > 1) {
1500
+ if (argc > NDT_MAX_DIM) {
1501
+ rb_raise(rb_eArgError, "too many indices %d.", argc);
1502
+ }
1503
+
1504
+ for (unsigned int i = 0; i < argc; i++) {
1505
+ x = argv[i];
1506
+ flags |= convert_single(indices+i, x, size);
1507
+ if (flags & KEY_ERROR) {
1508
+ return KEY_ERROR;
1509
+ }
1510
+ }
1511
+
1512
+ *len = argc;
1513
+ return flags;
1514
+ }
1515
+ else if (argc == 1 && RB_TYPE_P(argv[0], T_ARRAY)) { // args as an array
1516
+ *len = RARRAY_LEN(argv[0]);
1517
+
1518
+ for (int i = 0; i < *len; i++) {
1519
+ VALUE args[1] = { INT2NUM(i) };
1520
+ flags |= convert_single(indices+i, rb_ary_aref(1, args, argv[0]), size);
1521
+ if (flags & KEY_ERROR) {
1522
+ return KEY_ERROR;
1523
+ }
1524
+ }
1525
+ return flags;
1526
+ }
1527
+
1528
+ *len = 1;
1529
+ return convert_single(indices, argv[0], size);
1530
+ }
1531
+
1532
+ /* Implement the #[] Ruby method. */
1533
+ static VALUE
1534
+ XND_array_aref(int argc, VALUE *argv, VALUE self)
1535
+ {
1536
+ NDT_STATIC_CONTEXT(ctx);
1537
+ xnd_index_t indices[NDT_MAX_DIM];
1538
+ xnd_t x;
1539
+ int len, state;
1540
+ uint8_t flags;
1541
+ XndObject *xnd_p;
1542
+ size_t size;
1543
+ VALUE rb_size;
1544
+
1545
+ if (argc == 0) {
1546
+ rb_raise(rb_eArgError, "expected at least one argument for #[].");
1547
+ }
1548
+
1549
+ GET_XND(self, xnd_p);
1550
+ size = XND_get_size(self);
1551
+
1552
+ flags = convert_key(indices, &len, argc, argv, size);
1553
+ if (flags & KEY_ERROR) {
1554
+ rb_raise(rb_eArgError, "invalid index or key for #[].");
1555
+ }
1556
+
1557
+ x = xnd_subscript(&xnd_p->xnd, indices, len, &ctx);
1558
+ if (x.ptr == NULL) {
1559
+ seterr(&ctx);
1560
+ raise_error();
1561
+ }
1562
+
1563
+ return RubyXND_view_move_type(xnd_p, &x);
1564
+ }
1565
+
1566
+ /* Implementation for #== method.
1567
+
1568
+ @param other Other Ruby object to compare with.
1569
+ @return VALUE [TrueClass|FalseClass]
1570
+ */
1571
+ static VALUE
1572
+ XND_eqeq(VALUE self, VALUE other)
1573
+ {
1574
+ NDT_STATIC_CONTEXT(ctx);
1575
+ XndObject *left_p, *right_p;
1576
+ int r;
1577
+
1578
+ if (!XND_CHECK_TYPE(other)) {
1579
+ return Qfalse;
1580
+ }
1581
+
1582
+ GET_XND(self, left_p);
1583
+ GET_XND(other, right_p);
1584
+
1585
+ r = xnd_equal(XND(left_p), XND(right_p), &ctx);
1586
+
1587
+ if (r == 1) {
1588
+ return Qtrue;
1589
+ }
1590
+ else {
1591
+ return Qfalse;
1592
+ }
1593
+ }
1594
+
1595
+ /* Implement Ruby spaceship operator. */
1596
+ static VALUE
1597
+ XND_spaceship(VALUE self, VALUE other)
1598
+ {
1599
+ rb_raise(rb_eNotImpError, "spaceship not implemented yet.");
1600
+
1601
+ return Qnil;
1602
+ }
1603
+
1604
+ /* XND#strict_equal */
1605
+ static VALUE
1606
+ XND_strict_equal(VALUE self, VALUE other)
1607
+ {
1608
+ NDT_STATIC_CONTEXT(ctx);
1609
+ XndObject *left_p, *right_p;
1610
+ int r;
1611
+
1612
+ if (!XND_CHECK_TYPE(other)) {
1613
+ rb_raise(rb_eArgError, "argument type has to be XND.");
1614
+ }
1615
+
1616
+ GET_XND(self, left_p);
1617
+ GET_XND(other, right_p);
1618
+
1619
+ r = xnd_strict_equal(XND(left_p), XND(right_p), &ctx);
1620
+ if (r < 0) {
1621
+ seterr(&ctx);
1622
+ raise_error();
1623
+ }
1624
+
1625
+ if (r) {
1626
+ return Qtrue;
1627
+ }
1628
+ else {
1629
+ return Qfalse;
1630
+ }
1631
+ }
1632
+
1633
+ static size_t
1634
+ _XND_size(const xnd_t *x)
1635
+ {
1636
+ NDT_STATIC_CONTEXT(ctx);
1637
+ const ndt_t *t = x->type;
1638
+
1639
+ if (!ndt_is_concrete(t)) {
1640
+ rb_raise(rb_eTypeError, "NDT must be concrete to get size.");
1641
+ }
1642
+
1643
+ if (t->ndim > 0 && ndt_is_optional(t)) {
1644
+ rb_raise(rb_eNotImpError, "optional dimensions are not supported.");
1645
+ }
1646
+
1647
+ if (xnd_is_na(x)) {
1648
+ return 0;
1649
+ }
1650
+
1651
+ switch (t->tag) {
1652
+ case FixedDim: {
1653
+ return safe_downcast(t->FixedDim.shape);
1654
+ }
1655
+
1656
+ case VarDim: {
1657
+ int64_t start, step, shape;
1658
+
1659
+ shape = ndt_var_indices(&start, &step, t, x->index, &ctx);
1660
+ if (shape < 0) {
1661
+ seterr(&ctx);
1662
+ raise_error();
1663
+ }
1664
+
1665
+ return safe_downcast(shape);
1666
+ }
1667
+
1668
+ case Tuple: {
1669
+ return safe_downcast(t->Tuple.shape);
1670
+ }
1671
+
1672
+ case Record: {
1673
+ return safe_downcast(t->Record.shape);
1674
+ }
1675
+
1676
+ case Ref: {
1677
+ const xnd_t next = xnd_ref_next(x, &ctx);
1678
+ if (next.ptr == NULL) {
1679
+ seterr(&ctx);
1680
+ raise_error();
1681
+ }
1682
+
1683
+ return _XND_size(&next);
1684
+ }
1685
+
1686
+ case Constr: {
1687
+ const xnd_t next = xnd_constr_next(x, &ctx);
1688
+ if (next.ptr == NULL) {
1689
+ seterr(&ctx);
1690
+ raise_error();
1691
+ }
1692
+
1693
+ return _XND_size(&next);
1694
+ }
1695
+
1696
+ case Nominal: {
1697
+ const xnd_t next = xnd_nominal_next(x, &ctx);
1698
+ if (next.ptr == NULL) {
1699
+ seterr(&ctx);
1700
+ raise_error();
1701
+ }
1702
+
1703
+ return _XND_size(&next);
1704
+ }
1705
+
1706
+ default: {
1707
+ rb_raise(rb_eNoMethodError, "This type has no size method.");
1708
+ }
1709
+ }
1710
+ }
1711
+
1712
+ /* Implement XND#size. */
1713
+ static VALUE
1714
+ XND_size(VALUE self)
1715
+ {
1716
+ XndObject *self_p;
1717
+
1718
+ GET_XND(self, self_p);
1719
+ return ULL2NUM(_XND_size(XND(self_p)));
1720
+ }
1721
+
1722
+ /* Implement #[]= */
1723
+ static VALUE
1724
+ XND_array_store(int argc, VALUE *argv, VALUE self)
1725
+ {
1726
+ NDT_STATIC_CONTEXT(ctx);
1727
+ xnd_index_t indices[NDT_MAX_DIM];
1728
+ xnd_t x;
1729
+ int free_type = 0, ret, len;
1730
+ uint8_t flags;
1731
+ XndObject *self_p, *value_p;
1732
+ MemoryBlockObject *self_mblock_p;
1733
+ size_t size;
1734
+ VALUE value;
1735
+
1736
+ if (argc < 2) {
1737
+ rb_raise(rb_eArgError, "wrong number of arguments (given %d, expected at least 2)."
1738
+ , argc);
1739
+ }
1740
+
1741
+ GET_XND(self, self_p);
1742
+
1743
+ size = XND_get_size(self);
1744
+ flags = convert_key(indices, &len, argc-1, argv, size);
1745
+ if (flags & KEY_ERROR) {
1746
+ rb_raise(rb_eIndexError, "wrong kind of key in []=");
1747
+ }
1748
+
1749
+ if (flags & KEY_SLICE) {
1750
+ x = xnd_multikey(&self_p->xnd, indices, len, &ctx);
1751
+ free_type = 1;
1752
+ }
1753
+ else {
1754
+ x = xnd_subtree(&self_p->xnd, indices, len, &ctx);
1755
+ }
1756
+
1757
+ if (x.ptr == NULL) {
1758
+ seterr(&ctx);
1759
+ raise_error();
1760
+ }
1761
+
1762
+ value = argv[argc-1];
1763
+
1764
+ if (XND_CHECK_TYPE(value)) {
1765
+ GET_XND(value, value_p);
1766
+ GET_MBLOCK(self_p->mblock, self_mblock_p);
1767
+
1768
+ ret = xnd_copy(&x, XND(value_p), self_mblock_p->xnd->flags, &ctx);
1769
+ if (ret < 0) {
1770
+ seterr(&ctx);
1771
+ raise_error();
1772
+ }
1773
+ }
1774
+ else {
1775
+ ret = mblock_init(&x, value);
1776
+ }
1777
+
1778
+ if (free_type) {
1779
+ ndt_del((ndt_t *)x.type);
1780
+ }
1781
+
1782
+ return value;
1783
+ }
1784
+
1785
+ /* Implement XND#each */
1786
+ static VALUE
1787
+ XND_each(VALUE self)
1788
+ {
1789
+ rb_raise(rb_eNotImpError, "XND#each is not implemented yet."); /* assumption: raise until iteration is implemented */
1790
+ }
1791
+
1792
+ /*************************** Singleton methods ********************************/
1793
+
1794
+ static VALUE
1795
+ XND_s_empty(VALUE klass, VALUE type)
1796
+ {
1797
+ XndObject *self_p;
1798
+ VALUE self, mblock;
1799
+
1800
+ self = XndObject_alloc();
1801
+ GET_XND(self, self_p);
1802
+
1803
+ type = rb_ndtypes_from_object(type);
1804
+ mblock = mblock_empty(type);
1805
+
1806
+ XND_from_mblock(self_p, mblock);
1807
+ rb_xnd_gc_guard_register(self_p, mblock);
1808
+
1809
+ return self;
1810
+ }
1811
+
1812
+ /*************************** C-API ********************************/
1813
+
1814
+ size_t
1815
+ rb_xnd_hash_size(VALUE hash)
1816
+ {
1817
+ Check_Type(hash, T_HASH);
1818
+
1819
+ return NUM2ULL(rb_funcall(hash, rb_intern("size"), 0, NULL));
1820
+ }
1821
+
1822
+ /* FIXME: Find a better way to access real/imag parts of complex number.
1823
+ This is too slow.
1824
+ */
1825
+ int
1826
+ rb_xnd_get_complex_values(VALUE comp, double *real, double *imag)
1827
+ {
1828
+ Check_Type(comp, T_COMPLEX);
1829
+
1830
+ *real = NUM2DBL(rb_funcall(comp, rb_intern("real"), 0, NULL));
1831
+ *imag = NUM2DBL(rb_funcall(comp, rb_intern("imag"), 0, NULL));
1832
+
1833
+ return 0;
1834
+ }
1835
+
1836
+ /* Return true if obj is of type XND. */
1837
+ int
1838
+ rb_xnd_check_type(VALUE obj)
1839
+ {
1840
+ return XND_CHECK_TYPE(obj);
1841
+ }
1842
+
1843
+ /* Return the xnd_t internal object within this Ruby object. */
1844
+ const xnd_t *
1845
+ rb_xnd_const_xnd(VALUE xnd)
1846
+ {
1847
+ XndObject *xnd_p;
1848
+
1849
+ GET_XND(xnd, xnd_p);
1850
+
1851
+ return &((XndObject *)xnd_p)->xnd;
1852
+ }
1853
+
1854
+ /* Create a new XND object from xnd_t type. */
1855
+ VALUE
1856
+ rb_xnd_from_xnd(xnd_t *x)
1857
+ {
1858
+ VALUE mblock, xnd;
1859
+ XndObject *xnd_p;
1860
+
1861
+ mblock = mblock_from_xnd(x);
1862
+ xnd = XndObject_alloc();
1863
+ GET_XND(xnd, xnd_p);
1864
+
1865
+ XND_from_mblock(xnd_p, mblock);
1866
+ rb_xnd_gc_guard_register(xnd_p, mblock);
1867
+
1868
+ return xnd;
1869
+ }
1870
+
1871
+ /* Create an XND object of type ndt_t */
1872
+ VALUE
1873
+ rb_xnd_empty_from_type(ndt_t *t)
1874
+ {
1875
+ MemoryBlockObject *mblock_p;
1876
+ XndObject *xnd_p;
1877
+ VALUE type, mblock, xnd;
1878
+
1879
+ type = rb_ndtypes_from_type(t);
1880
+ mblock = mblock_empty(type);
1881
+ xnd = XndObject_alloc();
1882
+
1883
+ GET_XND(xnd, xnd_p);
1884
+ rb_xnd_gc_guard_register(xnd_p, mblock);
1885
+
1886
+ XND_from_mblock(xnd_p, mblock);
1887
+
1888
+ return xnd;
1889
+ }
1890
+
1891
+ VALUE
1892
+ rb_xnd_get_type(void)
1893
+ {
1894
+ return Qnil; /* assumption: placeholder return until this is implemented */
1895
+ }
1896
+
1897
+ /*
1898
+ * This function handles two common view cases:
1899
+ *
1900
+ * a) A pristine view that owns everything, including new memory.
1901
+ * b) A view that owns its type after xnd_subscript().
1902
+ */
1903
+ /* VALUE */
1904
+ /* rb_xnd_from_xnd_view(xnd_view_t *x) */
1905
+ /* { */
1906
+ /* if (x->obj == NULL && (x->flags & XND_OWN_ALL) == XND_OWN_ALL) { */
1907
+ /* VALUE type = rb_xnd_get_type(); */
1908
+ /* } */
1909
+ /* } */
1910
+
1911
+ void Init_ruby_xnd(void)
1912
+ {
1913
+ /* init classes */
1914
+ cRubyXND = rb_define_class("RubyXND", rb_cObject);
1915
+ cXND = rb_define_class("XND", cRubyXND);
1916
+ cRubyXND_MBlock = rb_define_class_under(cRubyXND, "MBlock", rb_cObject);
1917
+ cRubyXND_Ellipsis = rb_define_class_under(cRubyXND, "Ellipsis", rb_cObject);
1918
+ mRubyXND_GCGuard = rb_define_module_under(cRubyXND, "GCGuard");
1919
+
1920
+ /* errors */
1921
+ rb_eValueError = rb_define_class("ValueError", rb_eRuntimeError);
1922
+
1923
+ /* initializers */
1924
+ rb_define_alloc_func(cRubyXND, RubyXND_allocate);
1925
+ rb_define_method(cRubyXND, "initialize", RubyXND_initialize, 2);
1926
+
1927
+ /* instance methods */
1928
+ rb_define_method(cXND, "type", XND_type, 0);
1929
+ rb_define_method(cXND, "value", XND_value, 0);
1930
+ rb_define_method(cXND, "[]", XND_array_aref, -1);
1931
+ rb_define_method(cXND, "[]=", XND_array_store, -1);
1932
+ rb_define_method(cXND, "==", XND_eqeq, 1);
1933
+ // rb_define_method(cXND, "!=", XND_neq, 1);
1934
+ rb_define_method(cXND, "<=>", XND_spaceship, 1);
1935
+ rb_define_method(cXND, "strict_equal", XND_strict_equal, 1);
1936
+ rb_define_method(cXND, "size", XND_size, 0);
1937
+
1938
+ /* iterators */
1939
+ rb_define_method(cXND, "each", XND_each, 0);
1940
+
1941
+ /* singleton methods */
1942
+ rb_define_singleton_method(cXND, "empty", XND_s_empty, 1);
1943
+
1944
+ /* GC guard */
1945
+ rb_xnd_init_gc_guard();
1946
+
1947
+ #ifdef XND_DEBUG
1948
+ run_float_pack_unpack_tests();
1949
+ rb_define_const(cRubyXND, "XND_DEBUG", Qtrue);
1950
+ #else
1951
+ rb_define_const(cRubyXND, "XND_DEBUG", Qnil);
1952
+ #endif
1953
+ }
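
For orientation, here is a minimal usage sketch of the Ruby-level API that Init_ruby_xnd registers above (XND.new via the inherited RubyXND#initialize, #type, #value, #[], #[]=, #==, #strict_equal, #size and XND.empty). It is only a sketch under stated assumptions: it presumes that `require 'xnd'` loads lib/xnd.rb together with this extension without changing the (type, data) argument order of the C initializer, and that the companion ndtypes gem provides an NDTypes class that accepts type strings such as "2 * 3 * int64". Those names and the type notation are assumptions, not taken from this diff.

  require 'ndtypes'                         # assumption: companion gem providing NDTypes
  require 'xnd'

  t = NDTypes.new("2 * 3 * int64")          # assumed NDT constructor and type notation
  x = XND.new(t, [[1, 2, 3], [4, 5, 6]])    # RubyXND#initialize -> mblock_init packs the Array

  x.type                    # NDTypes object stored by XND_from_mblock
  x.value                   # => [[1, 2, 3], [4, 5, 6]], unpacked by _XND_value
  x.size                    # => 2, the outermost FixedDim shape

  # Indexing goes through convert_single: Integers become Index keys, Strings
  # become FieldName keys, Ranges become Slice keys, and Float::INFINITY is
  # sugar for a full slice.
  x[0, 1].value             # one-element view created by xnd_subscript
  x[0, 1] = 42              # XND_array_store packs the new value in place
  x[0..1]                   # sliced view sharing x's memory block
  x[Float::INFINITY]        # full slice along the first dimension

  y = XND.empty("2 * 3 * int64")  # assumes rb_ndtypes_from_object accepts a type string
  x == y                          # xnd_equal; #strict_equal also compares types strictly

Every view produced by #[] shares the parent's MBlock and keeps it alive through rb_xnd_gc_guard_register, which is why XndObject_dfree unregisters the object from the GC guard before freeing it.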