xnd 0.2.0dev6 → 0.2.0dev7

Files changed (74)
  1. checksums.yaml +4 -4
  2. data/README.md +2 -0
  3. data/Rakefile +1 -1
  4. data/ext/ruby_xnd/GPATH +0 -0
  5. data/ext/ruby_xnd/GRTAGS +0 -0
  6. data/ext/ruby_xnd/GTAGS +0 -0
  7. data/ext/ruby_xnd/extconf.rb +8 -5
  8. data/ext/ruby_xnd/gc_guard.c +53 -2
  9. data/ext/ruby_xnd/gc_guard.h +8 -2
  10. data/ext/ruby_xnd/include/overflow.h +147 -0
  11. data/ext/ruby_xnd/include/ruby_xnd.h +62 -0
  12. data/ext/ruby_xnd/include/xnd.h +590 -0
  13. data/ext/ruby_xnd/lib/libxnd.a +0 -0
  14. data/ext/ruby_xnd/lib/libxnd.so +1 -0
  15. data/ext/ruby_xnd/lib/libxnd.so.0 +1 -0
  16. data/ext/ruby_xnd/lib/libxnd.so.0.2.0dev3 +0 -0
  17. data/ext/ruby_xnd/ruby_xnd.c +556 -47
  18. data/ext/ruby_xnd/ruby_xnd.h +2 -1
  19. data/ext/ruby_xnd/xnd/Makefile +80 -0
  20. data/ext/ruby_xnd/xnd/config.h +26 -0
  21. data/ext/ruby_xnd/xnd/config.h.in +3 -0
  22. data/ext/ruby_xnd/xnd/config.log +421 -0
  23. data/ext/ruby_xnd/xnd/config.status +1023 -0
  24. data/ext/ruby_xnd/xnd/configure +376 -8
  25. data/ext/ruby_xnd/xnd/configure.ac +48 -7
  26. data/ext/ruby_xnd/xnd/doc/xnd/index.rst +3 -1
  27. data/ext/ruby_xnd/xnd/doc/xnd/{types.rst → xnd.rst} +3 -18
  28. data/ext/ruby_xnd/xnd/libxnd/Makefile +142 -0
  29. data/ext/ruby_xnd/xnd/libxnd/Makefile.in +43 -3
  30. data/ext/ruby_xnd/xnd/libxnd/Makefile.vc +19 -3
  31. data/ext/ruby_xnd/xnd/libxnd/bitmaps.c +42 -3
  32. data/ext/ruby_xnd/xnd/libxnd/bitmaps.o +0 -0
  33. data/ext/ruby_xnd/xnd/libxnd/bounds.c +366 -0
  34. data/ext/ruby_xnd/xnd/libxnd/bounds.o +0 -0
  35. data/ext/ruby_xnd/xnd/libxnd/contrib.h +98 -0
  36. data/ext/ruby_xnd/xnd/libxnd/contrib/bfloat16.h +213 -0
  37. data/ext/ruby_xnd/xnd/libxnd/copy.c +155 -4
  38. data/ext/ruby_xnd/xnd/libxnd/copy.o +0 -0
  39. data/ext/ruby_xnd/xnd/libxnd/cuda/cuda_memory.cu +121 -0
  40. data/ext/ruby_xnd/xnd/libxnd/cuda/cuda_memory.h +58 -0
  41. data/ext/ruby_xnd/xnd/libxnd/equal.c +195 -7
  42. data/ext/ruby_xnd/xnd/libxnd/equal.o +0 -0
  43. data/ext/ruby_xnd/xnd/libxnd/inline.h +32 -0
  44. data/ext/ruby_xnd/xnd/libxnd/libxnd.a +0 -0
  45. data/ext/ruby_xnd/xnd/libxnd/libxnd.so +1 -0
  46. data/ext/ruby_xnd/xnd/libxnd/libxnd.so.0 +1 -0
  47. data/ext/ruby_xnd/xnd/libxnd/libxnd.so.0.2.0dev3 +0 -0
  48. data/ext/ruby_xnd/xnd/libxnd/shape.c +207 -0
  49. data/ext/ruby_xnd/xnd/libxnd/shape.o +0 -0
  50. data/ext/ruby_xnd/xnd/libxnd/split.c +2 -2
  51. data/ext/ruby_xnd/xnd/libxnd/split.o +0 -0
  52. data/ext/ruby_xnd/xnd/libxnd/tests/Makefile +39 -0
  53. data/ext/ruby_xnd/xnd/libxnd/xnd.c +613 -91
  54. data/ext/ruby_xnd/xnd/libxnd/xnd.h +145 -4
  55. data/ext/ruby_xnd/xnd/libxnd/xnd.o +0 -0
  56. data/ext/ruby_xnd/xnd/python/test_xnd.py +1125 -50
  57. data/ext/ruby_xnd/xnd/python/xnd/__init__.py +609 -124
  58. data/ext/ruby_xnd/xnd/python/xnd/_version.py +1 -0
  59. data/ext/ruby_xnd/xnd/python/xnd/_xnd.c +1652 -101
  60. data/ext/ruby_xnd/xnd/python/xnd/libxnd.a +0 -0
  61. data/ext/ruby_xnd/xnd/python/xnd/libxnd.so +1 -0
  62. data/ext/ruby_xnd/xnd/python/xnd/libxnd.so.0 +1 -0
  63. data/ext/ruby_xnd/xnd/python/xnd/libxnd.so.0.2.0dev3 +0 -0
  64. data/ext/ruby_xnd/xnd/python/xnd/pyxnd.h +1 -1
  65. data/ext/ruby_xnd/xnd/python/xnd/util.h +25 -0
  66. data/ext/ruby_xnd/xnd/python/xnd/xnd.h +590 -0
  67. data/ext/ruby_xnd/xnd/python/xnd_randvalue.py +106 -6
  68. data/ext/ruby_xnd/xnd/python/xnd_support.py +4 -0
  69. data/ext/ruby_xnd/xnd/setup.py +46 -4
  70. data/lib/ruby_xnd.so +0 -0
  71. data/lib/xnd.rb +39 -3
  72. data/lib/xnd/version.rb +2 -2
  73. data/xnd.gemspec +2 -1
  74. metadata +58 -5
Binary contents (GPATH/GRTAGS/GTAGS, *.a, *.o, *.so.0.2.0dev3, ruby_xnd.so) are omitted from this diff.

data/ext/ruby_xnd/lib/libxnd.so (symlink)
@@ -0,0 +1 @@
+ext/ruby_xnd/lib/libxnd.so.0.2.0dev3

data/ext/ruby_xnd/lib/libxnd.so.0 (symlink)
@@ -0,0 +1 @@
+ext/ruby_xnd/lib/libxnd.so.0.2.0dev3
data/ext/ruby_xnd/ruby_xnd.c
@@ -37,6 +37,10 @@
 #include "ruby_xnd_internal.h"
 #include "xnd.h"
 
+#define XND_CHECK_NUMERIC(obj) Check_Type(obj, T_FIXNUM); \
+    Check_Type(obj, T_BIGNUM); Check_Type(obj, T_RATIONAL); \
+    Check_Type(obj, T_RATIONAL);
+
 VALUE cRubyXND;
 VALUE cXND;
 static VALUE cRubyXND_MBlock;
@@ -47,6 +51,8 @@ static VALUE rb_eValueError;
 
 VALUE mRubyXND_GCGuard;
 
+static VALUE seterr(ndt_context_t *ctx);
+
 /****************************************************************************/
 /* Error handling */
 /****************************************************************************/
@@ -108,6 +114,7 @@ MemoryBlockObject_dfree(void *self)
 {
     MemoryBlockObject *mblock = (MemoryBlockObject*)self;
 
+    rb_xnd_gc_guard_unregister_mblock_type(mblock);
     xnd_del(mblock->xnd);
     mblock->xnd = NULL;
     xfree(mblock);
@@ -159,19 +166,21 @@ mblock_allocate(void)
 
 /* Create empty mblock with no data. */
 static VALUE
-mblock_empty(VALUE type)
+mblock_empty(VALUE type, uint32_t flags)
 {
     NDT_STATIC_CONTEXT(ctx);
     MemoryBlockObject *mblock_p;
+    const ndt_t *ndt_p;
 
     if (!rb_ndtypes_check_type(type)) {
         rb_raise(rb_eArgError, "require NDT object to create mblock in mblock_empty.");
     }
 
     mblock_p = mblock_alloc();
-    mblock_p->xnd = xnd_empty_from_type(
-        rb_ndtypes_const_ndt(type),
-        XND_OWN_EMBEDDED, &ctx);
+    ndt_p = rb_ndtypes_const_ndt(type);
+    // ndt_incref(ndt_p);
+
+    mblock_p->xnd = xnd_empty_from_type(ndt_p, XND_OWN_EMBEDDED|flags, &ctx);
     if (mblock_p->xnd == NULL) {
         rb_raise(rb_eValueError, "cannot create mblock object from given type.");
     }
@@ -299,6 +308,42 @@ get_uint(VALUE data, uint64_t max)
     return x;
 }
 
+static int
+union_tag_and_value_from_tuple(uint8_t *tag, VALUE *value, const ndt_t *t,
+                               VALUE tuple)
+{
+    VALUE name;
+    int64_t i;
+    char *sname;
+
+    assert(t->tag == Union);
+
+    if (RARRAY_LEN(tuple) != 2) {
+        rb_raise(rb_eValueError,
+                 "unions are represented by a tuple (tag, value), where tag is a string.");
+    }
+
+    name = rb_ary_entry(tuple, 0);
+
+    Check_Type(name, T_STRING);
+
+    for (i = 0; i < t->Union.ntags; ++i) {
+        sname = RSTRING_PTR(name);
+        if (strcmp(sname, t->Union.tags[i]) == 0) {
+            break;
+        }
+    }
+
+    if (i == t->Union.ntags) {
+        rb_raise(rb_eValueError, "%s is not a valid tag name.", sname);
+    }
+
+    *tag = (uint8_t)i;
+    *value = rb_ary_entry(tuple, 1);
+
+    return 0;
+}
+
 /* Initialize an mblock object with data. */
 static int
 mblock_init(xnd_t * const x, VALUE data)
@@ -430,6 +475,26 @@ mblock_init(xnd_t * const x, VALUE data)
         return 0;
     }
 
+    case Union: {
+        VALUE tmp;
+        uint8_t tag;
+
+        if (union_tag_and_value_from_tuple(&tag, &tmp, t, data) < 0) {
+            return -1;
+        }
+
+        xnd_clear(x, XND_OWN_EMBEDDED);
+        XND_UNION_TAG(x->ptr) = tag;
+
+        xnd_t next = xnd_union_next(x, &ctx);
+        if (next.ptr == NULL) {
+            seterr(&ctx);
+            raise_error();
+        }
+
+        return mblock_init(&next, tmp);
+    }
+
     case Ref: {
         xnd_t next = xnd_ref_next(x, &ctx);
         if (next.ptr == NULL) {
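
Taken together, the two hunks above define the Ruby representation of union values: a union crosses the boundary as a two-element (tag, value) tuple, validated by union_tag_and_value_from_tuple and stored by the new Union case in mblock_init. A minimal round-trip sketch, assuming the keyword-style XND.new(data, type:) constructor from the gem's README:

    require "ndtypes"
    require "xnd"

    # The tag must be a String naming one of the union's fields;
    # anything else raises ValueError per the helper above.
    t = NDTypes.new("Union(i: int64, f: float64)")
    x = XND.new(["i", 42], type: t)
    x.value  # => ["i", 42], read back as a (tag, value) pair
             #    (see the Union case in _XND_value further down)
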
@@ -831,13 +896,13 @@ mblock_init(xnd_t * const x, VALUE data)
  * @param data - Data as a Ruby object.
  */
 static VALUE
-mblock_from_typed_value(VALUE type, VALUE data)
+mblock_from_typed_value(VALUE type, VALUE data, int32_t flags)
 {
     VALUE mblock;
     MemoryBlockObject *mblock_p;
 
-    mblock = mblock_empty(type);
-    GET_MBLOCK(mblock, mblock_p);
+    mblock = mblock_empty(type, flags);
+    GET_MBLOCK(mblock, mblock_p);
     mblock_init(&mblock_p->xnd->master, data);
 
     return mblock;
@@ -854,6 +919,11 @@ typedef struct XndObject {
 } XndObject;
 
 #define XND(xnd_p) (&(((XndObject *)xnd_p)->xnd))
+#define TYPE_OWNER(xnd_p) ((((XndObject *)xnd_p)->type))
+#define XND_TYPE(xnd_p) (((XndObject *)xnd_p)->xnd.type)
+#define XND_INDEX(xnd_p) (((XndObject *)xnd_p)->xnd.index)
+#define XND_PTR(xnd_p) (((XndObject *)xnd_p)->xnd.ptr)
+
 #define XND_CHECK_TYPE(xnd) (CLASS_OF(xnd) == cXND)
 #define GET_XND(obj, xnd_p) do { \
     TypedData_Get_Struct((obj), XndObject, \
@@ -900,7 +970,8 @@ XndObject_dfree(void *self)
 {
     XndObject *xnd = (XndObject*)self;
 
-    rb_xnd_gc_guard_unregister(xnd);
+    rb_xnd_gc_guard_unregister_xnd_mblock(xnd);
+    rb_xnd_gc_guard_unregister_xnd_type(xnd);
     xfree(xnd);
 }
 
@@ -954,18 +1025,57 @@ RubyXND_allocate(VALUE klass)
     return WRAP_XND(klass, xnd);
 }
 
+static uint32_t
+device_flags(VALUE array)
+{
+    VALUE device, no;
+
+    Check_Type(array, T_ARRAY);
+    if (RARRAY_LEN(array) != 2) {
+        rb_raise(rb_eTypeError, "device argument must be of the form (device_name, device_no).");
+    }
+
+    device = rb_ary_entry(array, 0);
+    Check_Type(device, T_STRING);
+    if (RTEST(rb_funcall(device, rb_intern("=="), 1, rb_str_new_literal("cuda")))) {
+        rb_raise(rb_eValueError, "currently only 'cuda' is supported as a device name.");
+    }
+
+    no = rb_ary_entry(array, 1);
+    Check_Type(no, T_FIXNUM);
+    if (FIX2NUM(no) != -1) {
+        rb_raise(rb_eValueError, "currently only 'cuda:managed' is supported as a device.");
+    }
+
+    return XND_CUDA_MANAGED;
+}
+
 /* Initialize a RubyXND object. */
 static VALUE
-RubyXND_initialize(VALUE self, VALUE type, VALUE data)
+RubyXND_initialize(VALUE self, VALUE type, VALUE data, VALUE device)
 {
     VALUE mblock;
+    MemoryBlockObject *mblock_p;
     XndObject *xnd_p;
+    uint32_t flags = 0;
 
-    mblock = mblock_from_typed_value(type, data);
-    GET_XND(self, xnd_p);
+    if (device != Qnil) {
+        flags = device_flags(device);
+        if (flags == UINT32_MAX) {
+            rb_raise(rb_eValueError, "device ID cannot be handled.");
+        }
+    }
 
+    mblock = mblock_from_typed_value(type, data, flags);
+    GET_MBLOCK(mblock, mblock_p);
+
+    rb_xnd_gc_guard_register_mblock_type(mblock_p, type);
+
+    GET_XND(self, xnd_p);
     XND_from_mblock(xnd_p, mblock);
-    rb_xnd_gc_guard_register(xnd_p, mblock);
+
+    rb_xnd_gc_guard_register_xnd_mblock(xnd_p, mblock);
+    rb_xnd_gc_guard_register_xnd_type(xnd_p, type);
 
 #ifdef XND_DEBUG
     assert(XND(xnd_p)->type);
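
device_flags translates a two-element (device_name, device_no) Array into memory-block flags, currently only XND_CUDA_MANAGED, and RubyXND#initialize grows a third parameter to accept it. A hedged sketch of the calling convention; note that the equality guard above raises precisely when the name does equal "cuda", which looks inverted in this dev release, so treat this as the intended shape rather than tested behavior:

    # Hypothetical direct use of the 3-arity initializer; a public
    # XND.new(..., device:) keyword that builds the ["cuda", -1] pair
    # is assumed to live in lib/xnd.rb.
    t = NDTypes.new("3 * int64")
    host = RubyXND.new(t, [1, 2, 3], nil)           # flags == 0, host memory
    cuda = RubyXND.new(t, [1, 2, 3], ["cuda", -1])  # requests XND_CUDA_MANAGED
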
@@ -990,6 +1100,16 @@ XND_get_size(VALUE xnd)
 
 /*************************** object properties ********************************/
 
+static VALUE
+XND_dtype(VALUE self)
+{
+    XndObject *self_p;
+    GET_XND(self, self_p);
+    const ndt_t *dtype = ndt_dtype(XND_TYPE(self_p));
+
+    return rb_ndtypes_from_type(dtype);
+}
+
 /* Return the ndtypes object of this xnd object. */
 static VALUE
 XND_type(VALUE self)
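
The new XND#dtype complements XND#type: ndt_dtype walks past all dimensions, so the method returns only the element type, wrapped as a fresh NDTypes object. A small sketch (XND.new keyword form assumed as before):

    x = XND.new([[1, 2], [3, 4]], type: NDTypes.new("2 * 2 * int64"))
    x.type   # => NDTypes object for "2 * 2 * int64"
    x.dtype  # => NDTypes object for "int64"
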
@@ -998,7 +1118,6 @@ XND_type(VALUE self)
 
     GET_XND(self, xnd_p);
 
-
     return xnd_p->type;
 }
 
@@ -1077,6 +1196,25 @@ _XND_value(const xnd_t * const x, const int64_t maxshape)
 
         return array;
     }
+
+    case VarDimElem: {
+        int64_t start, step, shape;
+
+        shape = ndt_var_indices(&start, &step, t, x->index, &ctx);
+        if (shape < 0) {
+            seterr(&ctx);
+            raise_error();
+        }
+
+        const int64_t i = adjust_index(t->VarDimElem.index, shape, &ctx);
+        if (i < 0) {
+            seterr(&ctx);
+            raise_error();
+        }
+
+        const xnd_t next = xnd_var_dim_next(x, start, step, i);
+        return _XND_value(&next, maxshape);
+    }
 
     case Tuple: {
         VALUE tuple, v;
@@ -1138,6 +1276,27 @@ _XND_value(const xnd_t * const x, const int64_t maxshape)
         return hash;
     }
 
+    case Union: {
+        VALUE array, tag, v;
+
+        array = rb_ary_new2(2);
+        const uint8_t i = XND_UNION_TAG(x->ptr);
+        tag = rb_str_new2(t->Union.tags[i]);
+
+        rb_ary_store(array, 0, tag);
+
+        const xnd_t next = xnd_union_next(x, &ctx);
+        if (next.ptr == NULL) {
+            seterr(&ctx);
+            raise_error();
+        }
+        v = _XND_value(&next, maxshape);
+
+        rb_ary_store(array, 1, v);
+
+        return array;
+    }
+
     case Ref: {
         xnd_t next = xnd_ref_next(x, &ctx);
         if (next.ptr == NULL) {
@@ -1309,8 +1468,8 @@ _XND_value(const xnd_t * const x, const int64_t maxshape)
     }
 
     case String: {
-        const char *s = XND_POINTER_DATA(x->ptr);
-        size_t size = s ? strlen(s) : 0;
+        const char *s = XND_STRING_DATA(x->ptr);
+        size_t size = strlen(s);
 
         return rb_utf8_str_new(s, size);
     }
@@ -1403,9 +1562,12 @@ static VALUE
 RubyXND_view_move_type(XndObject *src_p, xnd_t *x)
 {
     XndObject *view_p;
+    MemoryBlockObject *mblock_p;
     VALUE type, view;
 
-    type = rb_ndtypes_move_subtree(src_p->type, (ndt_t *)x->type);
+    type = rb_ndtypes_from_type(x->type);
+    ndt_decref(x->type);
+
     view = XndObject_alloc();
     GET_XND(view, view_p);
 
@@ -1413,7 +1575,11 @@ RubyXND_view_move_type(XndObject *src_p, xnd_t *x)
     view_p->type = type;
     view_p->xnd = *x;
 
-    rb_xnd_gc_guard_register(view_p, view_p->mblock);
+    GET_MBLOCK(view_p->mblock, mblock_p);
+
+    rb_xnd_gc_guard_register_xnd_type(view_p, type);
+    rb_xnd_gc_guard_register_xnd_mblock(view_p, view_p->mblock);
+    rb_xnd_gc_guard_register_mblock_type(mblock_p, type);
 
     return view;
 }
@@ -1598,6 +1764,34 @@ XND_spaceship(VALUE self, VALUE other)
     return Qnil;
 }
 
+static VALUE
+XND_lt(VALUE self, VALUE other)
+{
+    rb_raise(rb_eNotImpError, "< not implemented yet.");
+    return Qnil;
+}
+
+static VALUE
+XND_lteq(VALUE self, VALUE other)
+{
+    rb_raise(rb_eNotImpError, "<= not implemented yet.");
+    return Qnil;
+}
+
+static VALUE
+XND_gt(VALUE self, VALUE other)
+{
+    rb_raise(rb_eNotImpError, "> not implemented yet.");
+    return Qnil;
+}
+
+static VALUE
+XND_gteq(VALUE self, VALUE other)
+{
+    rb_raise(rb_eNotImpError, ">= not implemented yet.");
+    return Qnil;
+}
+
 /* XND#strict_equal */
 static VALUE
 XND_strict_equal(VALUE self, VALUE other)
@@ -1662,6 +1856,26 @@ _XND_size(const xnd_t *x)
         return safe_downcast(shape);
     }
 
+    case VarDimElem: {
+        NDT_STATIC_CONTEXT(ctx);
+        int64_t start, step, shape;
+
+        shape = ndt_var_indices(&start, &step, t, x->index, &ctx);
+        if (shape < 0) {
+            seterr(&ctx);
+            raise_error();
+        }
+
+        const int64_t i = adjust_index(t->VarDimElem.index, shape, &ctx);
+        if (i < 0) {
+            seterr(&ctx);
+            raise_error();
+        }
+
+        const xnd_t next = xnd_var_dim_next(x, start, step, i);
+        return _XND_size(&next);
+    }
+
     case Tuple: {
         return safe_downcast(t->Tuple.shape);
     }
@@ -1670,6 +1884,16 @@ _XND_size(const xnd_t *x)
         return safe_downcast(t->Record.shape);
     }
 
+    case Union: {
+        const xnd_t next = xnd_union_next(x, &ctx);
+        if (next.ptr == NULL) {
+            seterr(&ctx);
+            raise_error();
+        }
+
+        return _XND_size(&next);
+    }
+
     case Ref: {
         const xnd_t next = xnd_ref_next(x, &ctx);
         if (next.ptr == NULL) {
@@ -1743,19 +1967,11 @@ XND_array_store(int argc, VALUE *argv, VALUE self)
         rb_raise(rb_eIndexError, "wrong kind of key in []=");
     }
 
-    if (flags & KEY_SLICE) {
-        x = xnd_multikey(&self_p->xnd, indices, len, &ctx);
-        free_type = 1;
-    }
-    else {
-        x = xnd_subtree(&self_p->xnd, indices, len, &ctx);
-    }
-
+    x = xnd_subscript(&self_p->xnd, indices, len, &ctx);
     if (x.ptr == NULL) {
         seterr(&ctx);
         raise_error();
     }
-
     value = argv[argc-1];
 
     if (XND_CHECK_TYPE(value)) {
@@ -1771,10 +1987,7 @@ XND_array_store(int argc, VALUE *argv, VALUE self)
     else {
         ret = mblock_init(&x, value);
     }
-
-    if (free_type) {
-        ndt_del((ndt_t *)x.type);
-    }
+    ndt_decref(x.type);
 
     return value;
 }
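
These two hunks simplify XND#[]=: the KEY_SLICE/xnd_multikey versus xnd_subtree split collapses into a single xnd_subscript call, and the temporary type is now released with ndt_decref instead of a conditional ndt_del. A usage sketch, assuming Range keys map to slices as in the KEY_SLICE handling:

    x = XND.new([1, 2, 3, 4], type: NDTypes.new("4 * int64"))
    x[0] = 10           # element store
    x[1..2] = [20, 30]  # slice store, now the same xnd_subscript path
    x.value             # => [10, 20, 30, 4]
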
@@ -1807,22 +2020,299 @@ XND_short_value(VALUE self, VALUE maxshape)
     }
 }
 
+/* Implementation of XND#_transpose */
+static VALUE
+XND_transpose(VALUE self, VALUE permute) {
+    NDT_STATIC_CONTEXT(ctx);
+    int p[NDT_MAX_ARGS];
+    const ndt_t *t;
+    xnd_t x;
+    XndObject *self_p;
+
+    GET_XND(self, self_p);
+
+    if (permute != Qnil) {
+        Check_Type(permute, T_ARRAY);
+        const size_t len = RARRAY_LEN(permute);
+
+        if (len > NDT_MAX_ARGS) {
+            rb_raise(rb_eValueError, "permutation list is too long.");
+        }
+
+        for (int i = 0; i < len; i++) {
+            VALUE v_obj = rb_ary_entry(permute, i);
+            Check_Type(v_obj, T_FIXNUM);
+            int v = FIX2INT(v_obj);
+
+            if (v < 0 || v > INT_MAX) {
+                rb_raise(rb_eValueError, "permutation index is out of bounds.");
+            }
+            p[i] = (int)v;
+        }
+
+        t = ndt_transpose(XND_TYPE(self_p), p, (int)len, &ctx);
+    }
+    else {
+        t = ndt_transpose(XND_TYPE(self_p), NULL, 0, &ctx);
+    }
+
+    if (t == NULL) {
+        seterr(&ctx);
+        raise_error();
+    }
+
+    x = *XND(self_p);
+    x.type = t;
+
+    return RubyXND_view_move_type(self_p, &x);
+}
+
+/* XND#_reshape */
+static VALUE
+XND_reshape(VALUE self, VALUE obj_shape, VALUE order)
+{
+    NDT_STATIC_CONTEXT(ctx);
+    int64_t shape[NDT_MAX_DIM];
+    XndObject *self_p;
+    char ord = 'C';
+    size_t n;
+
+    if (order != Qnil) {
+        const char *c = RSTRING_PTR(order);
+        if (strlen(c) != 1) {
+            rb_raise(rb_eValueError, "'order' argument must be a 'C', 'F' or 'A'.");
+        }
+        ord = c[0];
+    }
+
+    Check_Type(obj_shape, T_ARRAY);
+
+    n = RARRAY_LEN(obj_shape);
+    if (n > NDT_MAX_DIM) {
+        rb_raise(rb_eValueError, "too many dimensions.");
+    }
+
+    for (int i = 0; i < n; ++i) {
+        shape[i] = FIX2INT(rb_ary_entry(obj_shape, i));
+        if (shape[i] < 0) {
+            rb_raise(rb_eValueError, "negative dimension size.");
+        }
+    }
+
+    GET_XND(self, self_p);
+    xnd_t view = xnd_reshape(XND(self_p), shape, (int)n, ord, &ctx);
+    if (xnd_err_occurred(&view)) {
+        seterr(&ctx);
+        raise_error();
+    }
+
+    return RubyXND_view_move_type(self_p, &view);
+}
+
+/* XND#copy_contiguous */
+static VALUE
+XND_copy_contiguous(int argc, VALUE *argv, VALUE self)
+{
+    NDT_STATIC_CONTEXT(ctx);
+    XndObject *self_p, *dest_p;
+    NdtObject *dtype_p;
+    VALUE dtype;
+    VALUE dest;
+    const ndt_t *t;
+    MemoryBlockObject *self_mblock_p;
+
+    if (argc == 1) {
+        dtype = argv[0];
+    }
+    else if (argc == 0) {
+        dtype = Qnil;
+    }
+    else {
+        rb_raise(rb_eArgError, "copy_contiguous can accept only one arg for dtype.");
+    }
+
+    GET_XND(self, self_p);
+
+    if (dtype != Qnil) {
+        if (!rb_ndtypes_check_type(dtype)) {
+            rb_raise(rb_eTypeError, "dtype must be of type ndtypes.");
+        }
+        t = ndt_copy_contiguous_dtype(XND_TYPE(self_p),
+                                      rb_ndtypes_const_ndt(dtype),
+                                      XND_INDEX(self_p), &ctx);
+    }
+    else {
+        t = ndt_copy_contiguous(XND_TYPE(self_p), XND_INDEX(self_p), &ctx);
+    }
+
+    if (t == NULL) {
+        seterr(&ctx);
+    }
+
+    dest = rb_xnd_empty_from_type(t, 0);
+    ndt_decref(t);
+
+    GET_XND(dest, dest_p);
+    GET_MBLOCK(self_p->mblock, self_mblock_p);
+
+    if (xnd_copy(XND(dest_p), XND(self_p), self_mblock_p->xnd->flags, &ctx) < 0) {
+        seterr(&ctx);
+        raise_error();
+    }
+
+    return dest;
+}
+
+static VALUE
+XND_serialize(VALUE self)
+{
+    NDT_STATIC_CONTEXT(ctx);
+    bool overflow = false;
+    const xnd_t *x;
+    const ndt_t *t;
+    VALUE result;
+    char *cp, *s;
+    int64_t tlen, size;
+    XndObject *self_p;
+
+    GET_XND(self, self_p);
+    x = XND(self_p);
+    t = XND_TYPE(self_p);
+
+    if (!ndt_is_pointer_free(t)) {
+        rb_raise(rb_eNotImpError, "serializing memory blocks with pointers is not implemented.");
+    }
+
+    if (ndt_is_optional(t) || ndt_subtree_is_optional(t)) {
+        rb_raise(rb_eNotImpError, "serializing bitmaps is not implemented.");
+    }
+
+    if (!ndt_is_c_contiguous(t) && !ndt_is_f_contiguous(t) &&
+        !ndt_is_var_contiguous(t)) {
+        rb_raise(rb_eNotImpError, "serializing non-contiguos memory blocks is not implemented.");
+    }
+
+    tlen = ndt_serialize(&s, t, &ctx);
+    if (tlen < 0) {
+        seterr(&ctx);
+        raise_error();
+    }
+
+    size = ADDi64(t->datasize, tlen, &overflow);
+    size = ADDi64(size, 8, &overflow);
+    if (overflow) {
+        ndt_free(s);
+        /* FIXME: maybe create a new OverflowError for this. */
+        rb_raise(rb_eTypeError, "too large to serialize.");
+    }
+
+    result = rb_str_new(NULL, size);
+    cp = RSTRING_PTR(result);
+
+    char *ptr = x->ptr;
+    if (t->ndim != 0) {
+        ptr = x->ptr + x->index * t->Concrete.FixedDim.itemsize;
+    }
+
+    memcpy(cp, ptr, t->datasize); cp += t->datasize;
+    memcpy(cp, s, tlen); cp += tlen;
+    memcpy(cp, &t->datasize, 8);
+    ndt_free(s);
+
+    return result;
+}
+
+
 /*************************** Singleton methods ********************************/
 
+
+/* Implement XND.deserialize */
 static VALUE
-XND_s_empty(VALUE klass, VALUE type)
+XND_s_deserialize(VALUE klass, VALUE v)
 {
+    NDT_STATIC_CONTEXT(ctx);
+    VALUE mblock, self;
+    bool overflow = false;
+    int64_t mblock_size;
+    MemoryBlockObject *mblock_p;
     XndObject *self_p;
-    VALUE self, mblock;
+
+    Check_Type(v, T_STRING);
+
+    const int64_t size = RSTRING_LEN(v);
+    if (size < 8) {
+        goto invalid_format;
+    }
+
+    const char *s = RSTRING_PTR(v);
+    memcpy(&mblock_size, s+size-8, 8);
+    if (mblock_size < 0) {
+        goto invalid_format;
+    }
+
+    const int64_t tmp = ADDi64(mblock_size, 8, &overflow);
+    const int64_t tlen = size - tmp;
+    if (overflow || tlen < 0) {
+        goto invalid_format;
+    }
+
+    const ndt_t *t = ndt_deserialize(s+mblock_size, tlen, &ctx);
+    if (t == NULL) {
+        seterr(&ctx);
+    }
+
+    if (t->datasize != mblock_size) {
+        goto invalid_format;
+    }
+
+    VALUE type = rb_ndtypes_from_type(t);
+    ndt_decref(t);
+
+    mblock = mblock_empty(type, XND_OWN_EMBEDDED);
+    GET_MBLOCK(mblock, mblock_p);
+    rb_xnd_gc_guard_register_mblock_type(mblock_p, type);
+
+    memcpy(mblock_p->xnd->master.ptr, s, mblock_size);
 
     self = XndObject_alloc();
     GET_XND(self, self_p);
+    XND_from_mblock(self_p, mblock);
+
+    rb_xnd_gc_guard_register_xnd_mblock(self_p, mblock);
+    rb_xnd_gc_guard_register_xnd_type(self_p, type);
+
+    return self;
 
-    type = rb_ndtypes_from_object(type);
-    mblock = mblock_empty(type);
+invalid_format:
+    rb_raise(rb_eValueError, "invalid format for xnd deserialization.");
+}
+
+static VALUE
+RubyXND_s_empty(VALUE klass, VALUE origin_type, VALUE device)
+{
+    XndObject *self_p;
+    MemoryBlockObject *mblock_p;
+    VALUE self, mblock;
+    VALUE type;
+    VALUE mblock_type, xnd_type;
+    uint32_t flags = 0;
+
+    self = XndObject_alloc();
+    GET_XND(self, self_p);
 
+    if (device != Qnil) {
+        flags = device_flags(device);
+    }
+
+    type = rb_ndtypes_from_object(origin_type);
+    mblock = mblock_empty(type, flags);
+
     XND_from_mblock(self_p, mblock);
-    rb_xnd_gc_guard_register(self_p, mblock);
+    GET_MBLOCK(mblock, mblock_p);
+
+    rb_xnd_gc_guard_register_xnd_mblock(self_p, mblock);
+    rb_xnd_gc_guard_register_xnd_type(self_p, type);
+    rb_xnd_gc_guard_register_mblock_type(mblock_p, type);
 
     return self;
 }
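
This hunk carries most of the release's new surface: _transpose and _reshape produce views through RubyXND_view_move_type, copy_contiguous materializes a contiguous copy (optionally cast to a new dtype), XND#serialize writes a memory block as [data][serialized type][8-byte datasize], and XND.deserialize reverses that layout. A hedged sketch; the underscore-prefixed names are what the C layer registers, and friendlier transpose/reshape wrappers in lib/xnd.rb are assumed:

    x = XND.new([[1, 2], [3, 4]], type: NDTypes.new("2 * 2 * int64"))

    x._transpose(nil).value     # => [[1, 3], [2, 4]]  (axes reversed)
    x._reshape([4], "C").value  # => [1, 2, 3, 4]      (row-major order)

    y = x.copy_contiguous       # independent contiguous copy of the data

    s = x.serialize             # binary String: data, then type, then size
    XND.deserialize(s) == x     # => true
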
@@ -1842,9 +2332,7 @@ rb_xnd_hash_size(VALUE hash)
  */
 int
 rb_xnd_get_complex_values(VALUE comp, double *real, double *imag)
-{
-    Check_Type(comp, T_COMPLEX);
-
+{
     *real = NUM2DBL(rb_funcall(comp, rb_intern("real"), 0, NULL));
     *imag = NUM2DBL(rb_funcall(comp, rb_intern("imag"), 0, NULL));
 
@@ -1873,36 +2361,45 @@ rb_xnd_const_xnd(VALUE xnd)
 VALUE
 rb_xnd_from_xnd(xnd_t *x)
 {
-    VALUE mblock, xnd;
+    VALUE mblock, xnd, type;
     XndObject *xnd_p;
+    MemoryBlockObject *mblock_p;
 
     mblock = mblock_from_xnd(x);
     xnd = XndObject_alloc();
     GET_XND(xnd, xnd_p);
+    GET_MBLOCK(mblock, mblock_p);
+    type = mblock_p->type;
 
     XND_from_mblock(xnd_p, mblock);
-    rb_xnd_gc_guard_register(xnd_p, mblock);
+
+    rb_xnd_gc_guard_register_xnd_mblock(xnd_p, mblock);
+    rb_xnd_gc_guard_register_xnd_type(xnd_p, type);
+    rb_xnd_gc_guard_register_mblock_type(mblock_p, type);
 
     return xnd;
 }
 
 /* Create an XND object of type ndt_t */
 VALUE
-rb_xnd_empty_from_type(ndt_t *t)
+rb_xnd_empty_from_type(const ndt_t *t, uint32_t flags)
 {
     MemoryBlockObject *mblock_p;
     XndObject *xnd_p;
     VALUE type, mblock, xnd;
 
     type = rb_ndtypes_from_type(t);
-    mblock = mblock_empty(type);
+    mblock = mblock_empty(type, flags);
     xnd = XndObject_alloc();
 
     GET_XND(xnd, xnd_p);
-    rb_xnd_gc_guard_register(xnd_p, mblock);
 
     XND_from_mblock(xnd_p, mblock);
 
+    rb_xnd_gc_guard_register_xnd_mblock(xnd_p, mblock);
+    rb_xnd_gc_guard_register_xnd_type(xnd_p, type);
+    rb_xnd_gc_guard_register_mblock_type(mblock_p, type);
+
     return xnd;
 }
 
@@ -1955,16 +2452,31 @@ void Init_ruby_xnd(void)
 
     /* initializers */
     rb_define_alloc_func(cRubyXND, RubyXND_allocate);
-    rb_define_method(cRubyXND, "initialize", RubyXND_initialize, 2);
+    rb_define_method(cRubyXND, "initialize", RubyXND_initialize, 3);
+
+    /* singleton methods */
+    rb_define_singleton_method(cRubyXND, "empty", RubyXND_s_empty, 2);
+    rb_define_singleton_method(cXND, "deserialize", XND_s_deserialize, 1);
 
     /* instance methods */
     rb_define_method(cXND, "type", XND_type, 0);
+    rb_define_method(cXND, "dtype", XND_dtype, 0);
     rb_define_method(cXND, "value", XND_value, 0);
     rb_define_method(cXND, "[]", XND_array_aref, -1);
     rb_define_method(cXND, "[]=", XND_array_store, -1);
     rb_define_method(cXND, "==", XND_eqeq, 1);
+    rb_define_method(cXND, "serialize", XND_serialize, 0);
+    rb_define_method(cXND, "copy_contiguous", XND_copy_contiguous, -1);
+    rb_define_method(cXND, "_transpose", XND_transpose, 1);
+    rb_define_method(cXND, "_reshape", XND_reshape, 2);
+
     // rb_define_method(cXND, "!=", XND_neq, 1);
     rb_define_method(cXND, "<=>", XND_spaceship, 1);
+    rb_define_method(cXND, "<", XND_lt, 1);
+    rb_define_method(cXND, "<=", XND_lteq, 1);
+    rb_define_method(cXND, ">", XND_gt, 1);
+    rb_define_method(cXND, ">=", XND_gteq, 1);
+    rb_define_method(cXND, "<=>", XND_spaceship, 1);
     rb_define_method(cXND, "strict_equal", XND_strict_equal, 1);
     rb_define_method(cXND, "size", XND_size, 0);
     rb_define_method(cXND, "short_value", XND_short_value, 1);
@@ -1972,9 +2484,6 @@ void Init_ruby_xnd(void)
     /* iterators */
     rb_define_method(cXND, "each", XND_each, 0);
 
-    /* singleton methods */
-    rb_define_singleton_method(cXND, "empty", XND_s_empty, 1);
-
     /* GC guard */
     rb_xnd_init_gc_guard();
 