xnd 0.2.0dev3
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/CONTRIBUTING.md +42 -0
- data/Gemfile +3 -0
- data/History.md +0 -0
- data/README.md +7 -0
- data/Rakefile +135 -0
- data/ext/ruby_xnd/extconf.rb +70 -0
- data/ext/ruby_xnd/float_pack_unpack.c +277 -0
- data/ext/ruby_xnd/float_pack_unpack.h +39 -0
- data/ext/ruby_xnd/gc_guard.c +36 -0
- data/ext/ruby_xnd/gc_guard.h +12 -0
- data/ext/ruby_xnd/include/xnd.h +449 -0
- data/ext/ruby_xnd/lib/libxnd.a +0 -0
- data/ext/ruby_xnd/lib/libxnd.so +1 -0
- data/ext/ruby_xnd/lib/libxnd.so.0 +1 -0
- data/ext/ruby_xnd/lib/libxnd.so.0.2.0dev3 +0 -0
- data/ext/ruby_xnd/memory_block_object.c +32 -0
- data/ext/ruby_xnd/memory_block_object.h +33 -0
- data/ext/ruby_xnd/ruby_xnd.c +1953 -0
- data/ext/ruby_xnd/ruby_xnd.h +61 -0
- data/ext/ruby_xnd/ruby_xnd_internal.h +85 -0
- data/ext/ruby_xnd/util.h +170 -0
- data/ext/ruby_xnd/xnd/AUTHORS.txt +5 -0
- data/ext/ruby_xnd/xnd/INSTALL.txt +134 -0
- data/ext/ruby_xnd/xnd/LICENSE.txt +29 -0
- data/ext/ruby_xnd/xnd/MANIFEST.in +3 -0
- data/ext/ruby_xnd/xnd/Makefile.in +80 -0
- data/ext/ruby_xnd/xnd/README.rst +44 -0
- data/ext/ruby_xnd/xnd/config.guess +1530 -0
- data/ext/ruby_xnd/xnd/config.h.in +22 -0
- data/ext/ruby_xnd/xnd/config.sub +1782 -0
- data/ext/ruby_xnd/xnd/configure +4867 -0
- data/ext/ruby_xnd/xnd/configure.ac +164 -0
- data/ext/ruby_xnd/xnd/doc/Makefile +14 -0
- data/ext/ruby_xnd/xnd/doc/_static/copybutton.js +66 -0
- data/ext/ruby_xnd/xnd/doc/conf.py +26 -0
- data/ext/ruby_xnd/xnd/doc/index.rst +44 -0
- data/ext/ruby_xnd/xnd/doc/libxnd/data-structures.rst +186 -0
- data/ext/ruby_xnd/xnd/doc/libxnd/functions.rst +148 -0
- data/ext/ruby_xnd/xnd/doc/libxnd/index.rst +25 -0
- data/ext/ruby_xnd/xnd/doc/releases/index.rst +34 -0
- data/ext/ruby_xnd/xnd/doc/xnd/align-pack.rst +96 -0
- data/ext/ruby_xnd/xnd/doc/xnd/buffer-protocol.rst +42 -0
- data/ext/ruby_xnd/xnd/doc/xnd/index.rst +30 -0
- data/ext/ruby_xnd/xnd/doc/xnd/quickstart.rst +62 -0
- data/ext/ruby_xnd/xnd/doc/xnd/types.rst +674 -0
- data/ext/ruby_xnd/xnd/install-sh +527 -0
- data/ext/ruby_xnd/xnd/libxnd/Makefile.in +102 -0
- data/ext/ruby_xnd/xnd/libxnd/Makefile.vc +112 -0
- data/ext/ruby_xnd/xnd/libxnd/bitmaps.c +345 -0
- data/ext/ruby_xnd/xnd/libxnd/contrib.h +313 -0
- data/ext/ruby_xnd/xnd/libxnd/copy.c +944 -0
- data/ext/ruby_xnd/xnd/libxnd/equal.c +1216 -0
- data/ext/ruby_xnd/xnd/libxnd/inline.h +154 -0
- data/ext/ruby_xnd/xnd/libxnd/overflow.h +147 -0
- data/ext/ruby_xnd/xnd/libxnd/split.c +286 -0
- data/ext/ruby_xnd/xnd/libxnd/tests/Makefile.in +39 -0
- data/ext/ruby_xnd/xnd/libxnd/tests/Makefile.vc +44 -0
- data/ext/ruby_xnd/xnd/libxnd/tests/README.txt +2 -0
- data/ext/ruby_xnd/xnd/libxnd/tests/runtest.c +101 -0
- data/ext/ruby_xnd/xnd/libxnd/tests/test.h +48 -0
- data/ext/ruby_xnd/xnd/libxnd/tests/test_fixed.c +108 -0
- data/ext/ruby_xnd/xnd/libxnd/xnd.c +1304 -0
- data/ext/ruby_xnd/xnd/libxnd/xnd.h +449 -0
- data/ext/ruby_xnd/xnd/python/test_xnd.py +3144 -0
- data/ext/ruby_xnd/xnd/python/xnd/__init__.py +290 -0
- data/ext/ruby_xnd/xnd/python/xnd/_xnd.c +2822 -0
- data/ext/ruby_xnd/xnd/python/xnd/contrib/pretty.py +850 -0
- data/ext/ruby_xnd/xnd/python/xnd/docstrings.h +129 -0
- data/ext/ruby_xnd/xnd/python/xnd/pyxnd.h +200 -0
- data/ext/ruby_xnd/xnd/python/xnd/util.h +182 -0
- data/ext/ruby_xnd/xnd/python/xnd_randvalue.py +1121 -0
- data/ext/ruby_xnd/xnd/python/xnd_support.py +106 -0
- data/ext/ruby_xnd/xnd/setup.py +303 -0
- data/ext/ruby_xnd/xnd/vcbuild/INSTALL.txt +42 -0
- data/ext/ruby_xnd/xnd/vcbuild/runtest32.bat +16 -0
- data/ext/ruby_xnd/xnd/vcbuild/runtest64.bat +14 -0
- data/ext/ruby_xnd/xnd/vcbuild/vcbuild32.bat +29 -0
- data/ext/ruby_xnd/xnd/vcbuild/vcbuild64.bat +29 -0
- data/ext/ruby_xnd/xnd/vcbuild/vcclean.bat +13 -0
- data/ext/ruby_xnd/xnd/vcbuild/vcdistclean.bat +14 -0
- data/lib/ruby_xnd.so +0 -0
- data/lib/xnd.rb +306 -0
- data/lib/xnd/monkeys.rb +29 -0
- data/lib/xnd/version.rb +6 -0
- data/spec/debug_spec.rb +9 -0
- data/spec/gc_guard_spec.rb +10 -0
- data/spec/leakcheck.rb +9 -0
- data/spec/spec_helper.rb +877 -0
- data/spec/type_inference_spec.rb +81 -0
- data/spec/xnd_spec.rb +2921 -0
- data/xnd.gemspec +47 -0
- metadata +215 -0
# Makefile for the libxnd test suite (template processed by configure).
# Builds two drivers: runtest (linked against the static library) and
# runtest_shared (linked against the shared library).

SRCDIR = ..

CC = @CC@
LIBSTATIC = @LIBSTATIC@
LIBSHARED = @LIBSHARED@

INCLUDES = @CONFIGURE_INCLUDES_TEST@
LIBS = @CONFIGURE_LIBS_TEST@

CONFIGURE_CFLAGS = @CONFIGURE_CFLAGS@
XND_CFLAGS = $(strip $(CONFIGURE_CFLAGS) $(CFLAGS))


default: runtest runtest_shared


runtest:\
Makefile runtest.c test_fixed.c test.h $(SRCDIR)/xnd.h $(SRCDIR)/$(LIBSTATIC)
	$(CC) -I$(SRCDIR) -I$(INCLUDES) $(XND_CFLAGS) \
	    -o runtest runtest.c test_fixed.c $(SRCDIR)/$(LIBSTATIC) \
	    $(LIBS)/libndtypes.a

runtest_shared:\
Makefile runtest.c test_fixed.c test.h $(SRCDIR)/xnd.h $(SRCDIR)/$(LIBSHARED)
	$(CC) -I$(SRCDIR) -I$(INCLUDES) -L$(SRCDIR) -L$(LIBS) \
	    $(XND_CFLAGS) -o runtest_shared runtest.c test_fixed.c -lxnd -lndtypes


FORCE:

clean: FORCE
	rm -f *.o *.gch *.gcda *.gcno *.gcov *.dyn *.dpi *.lock
	rm -f runtest runtest_shared

distclean: clean
	rm -rf Makefile
# NMAKE Makefile (Visual Studio) for the libxnd test suite.
# Builds runtest (static libraries, /MT runtime) and runtest_shared
# (DLL import libraries, /MD runtime).

SRCDIR = ..

# Version-stamped library names produced by the main libxnd build.
LIBSTATIC = libxnd-0.2.0dev3.lib
LIBSHARED = libxnd-0.2.0dev3.dll.lib
LIBNDTYPESSTATIC = libndtypes-0.2.0dev3.lib
LIBNDTYPESIMPORT = libndtypes-0.2.0dev3.dll.lib

# Location of the ndtypes headers and libraries; both are overridable on
# the nmake command line (nmake LIBNDTYPESINCLUDE=... LIBNDTYPESDIR=...).
!ifndef LIBNDTYPESINCLUDE
LIBNDTYPESINCLUDE = ..\..\ndtypes\libndtypes
!endif

!ifndef LIBNDTYPESDIR
LIBNDTYPESDIR = ..\..\ndtypes\libndtypes
!endif


CC = cl.exe
# Static-runtime flags for the static-library build.
CFLAGS = /nologo /MT /Ox /GS /EHsc
# XND_IMPORT selects dllimport declarations; /MD matches the DLL runtime.
CFLAGS_SHARED = /nologo /DXND_IMPORT /MD /Ox /GS /EHsc

default: runtest runtest_shared


runtest:\
Makefile runtest.c test_fixed.c test.h $(SRCDIR)\xnd.h $(SRCDIR)\$(LIBSTATIC)
	$(CC) "-I$(SRCDIR)" "-I$(LIBNDTYPESINCLUDE)" $(CFLAGS) /Feruntest runtest.c \
	test_fixed.c $(SRCDIR)\$(LIBSTATIC) /link "/LIBPATH:$(LIBNDTYPESDIR)" $(LIBNDTYPESSTATIC)

runtest_shared:\
Makefile runtest.c test_fixed.c test.h $(SRCDIR)\xnd.h $(SRCDIR)\$(LIBSHARED)
	$(CC) "-I$(SRCDIR)" "-I$(LIBNDTYPESINCLUDE)" $(CFLAGS_SHARED) /Feruntest_shared \
	runtest.c test_fixed.c $(SRCDIR)\$(LIBSHARED) /link "/LIBPATH:$(LIBNDTYPESDIR)" $(LIBNDTYPESIMPORT)


FORCE:

clean: FORCE
	del /q /f *.exe *.obj *.lib *.dll *.exp *.manifest 2>NUL

distclean: clean
	del /q /f Makefile 2>NUL
@@ -0,0 +1,101 @@
|
|
1
|
+
/*
|
2
|
+
* BSD 3-Clause License
|
3
|
+
*
|
4
|
+
* Copyright (c) 2017-2018, plures
|
5
|
+
* All rights reserved.
|
6
|
+
*
|
7
|
+
* Redistribution and use in source and binary forms, with or without
|
8
|
+
* modification, are permitted provided that the following conditions are met:
|
9
|
+
*
|
10
|
+
* 1. Redistributions of source code must retain the above copyright notice,
|
11
|
+
* this list of conditions and the following disclaimer.
|
12
|
+
*
|
13
|
+
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
14
|
+
* this list of conditions and the following disclaimer in the documentation
|
15
|
+
* and/or other materials provided with the distribution.
|
16
|
+
*
|
17
|
+
* 3. Neither the name of the copyright holder nor the names of its
|
18
|
+
* contributors may be used to endorse or promote products derived from
|
19
|
+
* this software without specific prior written permission.
|
20
|
+
*
|
21
|
+
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
22
|
+
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
23
|
+
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
24
|
+
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
25
|
+
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
26
|
+
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
27
|
+
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
28
|
+
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
29
|
+
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
30
|
+
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
31
|
+
*/
|
32
|
+
|
33
|
+
|
34
|
+
#include <stdio.h>
|
35
|
+
#include <stdlib.h>
|
36
|
+
#include <string.h>
|
37
|
+
#include <assert.h>
|
38
|
+
#include "ndtypes.h"
|
39
|
+
#include "test.h"
|
40
|
+
|
41
|
+
|
42
|
+
static int
|
43
|
+
init_tests(void)
|
44
|
+
{
|
45
|
+
ndt_context_t *ctx;
|
46
|
+
|
47
|
+
ctx = ndt_context_new();
|
48
|
+
if (ctx == NULL) {
|
49
|
+
fprintf(stderr, "error: out of memory");
|
50
|
+
return -1;
|
51
|
+
}
|
52
|
+
|
53
|
+
if (ndt_init(ctx) < 0) {
|
54
|
+
ndt_err_fprint(stderr, ctx);
|
55
|
+
ndt_context_del(ctx);
|
56
|
+
return -1;
|
57
|
+
}
|
58
|
+
|
59
|
+
if (xnd_init_float(ctx) < 0) {
|
60
|
+
ndt_err_fprint(stderr, ctx);
|
61
|
+
ndt_context_del(ctx);
|
62
|
+
return -1;
|
63
|
+
}
|
64
|
+
|
65
|
+
ndt_context_del(ctx);
|
66
|
+
return 0;
|
67
|
+
}
|
68
|
+
|
69
|
+
static int (*tests[])(void) = {
|
70
|
+
test_fixed,
|
71
|
+
NULL
|
72
|
+
};
|
73
|
+
|
74
|
+
int
|
75
|
+
main(void)
|
76
|
+
{
|
77
|
+
int (**f)(void);
|
78
|
+
int success = 0;
|
79
|
+
int fail = 0;
|
80
|
+
|
81
|
+
if (init_tests() < 0) {
|
82
|
+
return 1;
|
83
|
+
}
|
84
|
+
|
85
|
+
for (f = tests; *f != NULL; f++) {
|
86
|
+
if ((*f)() < 0)
|
87
|
+
fail++;
|
88
|
+
else
|
89
|
+
success++;
|
90
|
+
}
|
91
|
+
|
92
|
+
if (fail) {
|
93
|
+
fprintf(stderr, "\nFAIL (failures=%d)\n", fail);
|
94
|
+
}
|
95
|
+
else {
|
96
|
+
fprintf(stderr, "\n%d tests OK.\n", success);
|
97
|
+
}
|
98
|
+
|
99
|
+
ndt_finalize();
|
100
|
+
return fail ? 1 : 0;
|
101
|
+
}
|
@@ -0,0 +1,48 @@
|
|
1
|
+
/*
|
2
|
+
* BSD 3-Clause License
|
3
|
+
*
|
4
|
+
* Copyright (c) 2017-2018, plures
|
5
|
+
* All rights reserved.
|
6
|
+
*
|
7
|
+
* Redistribution and use in source and binary forms, with or without
|
8
|
+
* modification, are permitted provided that the following conditions are met:
|
9
|
+
*
|
10
|
+
* 1. Redistributions of source code must retain the above copyright notice,
|
11
|
+
* this list of conditions and the following disclaimer.
|
12
|
+
*
|
13
|
+
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
14
|
+
* this list of conditions and the following disclaimer in the documentation
|
15
|
+
* and/or other materials provided with the distribution.
|
16
|
+
*
|
17
|
+
* 3. Neither the name of the copyright holder nor the names of its
|
18
|
+
* contributors may be used to endorse or promote products derived from
|
19
|
+
* this software without specific prior written permission.
|
20
|
+
*
|
21
|
+
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
22
|
+
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
23
|
+
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
24
|
+
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
25
|
+
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
26
|
+
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
27
|
+
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
28
|
+
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
29
|
+
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
30
|
+
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
31
|
+
*/
|
32
|
+
|
33
|
+
|
34
|
+
#ifndef TEST_H
#define TEST_H


#include "ndtypes.h"
#include "xnd.h"


/* Element count of a statically sized array (arrays only — decays wrong
   on pointers/parameters). */
#define ARRAY_SIZE(a) ((int)(sizeof(a)/sizeof(a[0])))


/* Test entry points. Contract: 0 on success, negative on failure —
   runtest.c's driver counts only negative returns as failures. */
int test_fixed(void);


#endif /* TEST_H */
|
@@ -0,0 +1,108 @@
|
|
1
|
+
/*
|
2
|
+
* BSD 3-Clause License
|
3
|
+
*
|
4
|
+
* Copyright (c) 2017-2018, plures
|
5
|
+
* All rights reserved.
|
6
|
+
*
|
7
|
+
* Redistribution and use in source and binary forms, with or without
|
8
|
+
* modification, are permitted provided that the following conditions are met:
|
9
|
+
*
|
10
|
+
* 1. Redistributions of source code must retain the above copyright notice,
|
11
|
+
* this list of conditions and the following disclaimer.
|
12
|
+
*
|
13
|
+
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
14
|
+
* this list of conditions and the following disclaimer in the documentation
|
15
|
+
* and/or other materials provided with the distribution.
|
16
|
+
*
|
17
|
+
* 3. Neither the name of the copyright holder nor the names of its
|
18
|
+
* contributors may be used to endorse or promote products derived from
|
19
|
+
* this software without specific prior written permission.
|
20
|
+
*
|
21
|
+
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
22
|
+
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
23
|
+
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
24
|
+
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
25
|
+
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
26
|
+
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
27
|
+
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
28
|
+
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
29
|
+
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
30
|
+
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
31
|
+
*/
|
32
|
+
|
33
|
+
|
34
|
+
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <assert.h>
#include "ndtypes.h"
#include "test.h"
|
39
|
+
|
40
|
+
|
41
|
+
int
|
42
|
+
test_fixed(void)
|
43
|
+
{
|
44
|
+
ndt_context_t *ctx;
|
45
|
+
xnd_master_t *x;
|
46
|
+
xnd_t view;
|
47
|
+
uint16_t *ptr;
|
48
|
+
int ret = 0;
|
49
|
+
int i, j, k, l;
|
50
|
+
int64_t indices[3];
|
51
|
+
|
52
|
+
/* a1 = [[[0, 1], [2, 3]], [[4, 5], [6, 7]], [[8, 9], [10, 11]]] */
|
53
|
+
const char *s = "3 * 2 * 2 * uint16";
|
54
|
+
uint16_t data[12] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
|
55
|
+
|
56
|
+
|
57
|
+
ctx = ndt_context_new();
|
58
|
+
if (ctx == NULL) {
|
59
|
+
fprintf(stderr, "out of memory\n");
|
60
|
+
return 1;
|
61
|
+
}
|
62
|
+
|
63
|
+
|
64
|
+
/***** Type with fixed dimensions *****/
|
65
|
+
x = xnd_empty_from_string(s, XND_OWN_ALL, ctx);
|
66
|
+
if (x == NULL) {
|
67
|
+
goto error;
|
68
|
+
}
|
69
|
+
|
70
|
+
ptr = (uint16_t *)x->master.ptr;
|
71
|
+
for (i = 0; i < ARRAY_SIZE(data); i++) {
|
72
|
+
ptr[i] = data[i];
|
73
|
+
}
|
74
|
+
|
75
|
+
for (i = 0; i < 3; i++) {
|
76
|
+
for (j = 0; j < 2; j++) {
|
77
|
+
for (k = 0; k < 2; k++) {
|
78
|
+
indices[0] = i; indices[1] = j; indices[2] = k;
|
79
|
+
l = i * 4 + j * 2 + k;
|
80
|
+
view = xnd_subtree_index(&x->master, indices, 3, ctx);
|
81
|
+
if (view.ptr == NULL) {
|
82
|
+
goto error;
|
83
|
+
}
|
84
|
+
assert(view.type->tag == Uint16);
|
85
|
+
if (*(uint16_t *)(view.ptr) != data[l]) {
|
86
|
+
ndt_err_format(ctx, NDT_RuntimeError, "unexpected value");
|
87
|
+
goto error;
|
88
|
+
}
|
89
|
+
}
|
90
|
+
}
|
91
|
+
}
|
92
|
+
|
93
|
+
|
94
|
+
fprintf(stderr, "test_fixed (1 test case)\n");
|
95
|
+
|
96
|
+
|
97
|
+
out:
|
98
|
+
xnd_del(x);
|
99
|
+
ndt_context_del(ctx);
|
100
|
+
return ret;
|
101
|
+
|
102
|
+
error:
|
103
|
+
ret = -1;
|
104
|
+
ndt_err_fprint(stderr, ctx);
|
105
|
+
goto out;
|
106
|
+
}
|
107
|
+
|
108
|
+
|
@@ -0,0 +1,1304 @@
|
|
1
|
+
/*
|
2
|
+
* BSD 3-Clause License
|
3
|
+
*
|
4
|
+
* Copyright (c) 2017-2018, plures
|
5
|
+
* All rights reserved.
|
6
|
+
*
|
7
|
+
* Redistribution and use in source and binary forms, with or without
|
8
|
+
* modification, are permitted provided that the following conditions are met:
|
9
|
+
*
|
10
|
+
* 1. Redistributions of source code must retain the above copyright notice,
|
11
|
+
* this list of conditions and the following disclaimer.
|
12
|
+
*
|
13
|
+
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
14
|
+
* this list of conditions and the following disclaimer in the documentation
|
15
|
+
* and/or other materials provided with the distribution.
|
16
|
+
*
|
17
|
+
* 3. Neither the name of the copyright holder nor the names of its
|
18
|
+
* contributors may be used to endorse or promote products derived from
|
19
|
+
* this software without specific prior written permission.
|
20
|
+
*
|
21
|
+
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
22
|
+
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
23
|
+
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
24
|
+
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
25
|
+
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
26
|
+
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
27
|
+
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
28
|
+
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
29
|
+
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
30
|
+
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
31
|
+
*/
|
32
|
+
|
33
|
+
|
34
|
+
#include <stdlib.h>
|
35
|
+
#include <stdint.h>
|
36
|
+
#include <inttypes.h>
|
37
|
+
#include <string.h>
|
38
|
+
#include <assert.h>
|
39
|
+
#include "ndtypes.h"
|
40
|
+
#include "xnd.h"
|
41
|
+
#include "inline.h"
|
42
|
+
#include "contrib.h"
|
43
|
+
|
44
|
+
|
45
|
+
static int xnd_init(xnd_t * const x, const uint32_t flags, ndt_context_t *ctx);
|
46
|
+
static void xnd_clear(xnd_t * const x, const uint32_t flags);
|
47
|
+
|
48
|
+
|
49
|
+
/*****************************************************************************/
|
50
|
+
/* Error handling */
|
51
|
+
/*****************************************************************************/
|
52
|
+
|
53
|
+
/* error return value */
|
54
|
+
const xnd_t xnd_error = {
|
55
|
+
.bitmap = {.data=NULL, .size=0, .next=NULL},
|
56
|
+
.index = 0,
|
57
|
+
.type = NULL,
|
58
|
+
.ptr = NULL
|
59
|
+
};
|
60
|
+
|
61
|
+
int
|
62
|
+
xnd_err_occurred(const xnd_t *x)
|
63
|
+
{
|
64
|
+
return x->ptr == NULL;
|
65
|
+
}
|
66
|
+
|
67
|
+
|
68
|
+
/*****************************************************************************/
|
69
|
+
/* Create and initialize a new master buffer */
|
70
|
+
/*****************************************************************************/
|
71
|
+
|
72
|
+
static bool
|
73
|
+
requires_init(const ndt_t * const t)
|
74
|
+
{
|
75
|
+
const ndt_t *dtype = ndt_dtype(t);
|
76
|
+
|
77
|
+
switch (dtype->tag) {
|
78
|
+
case Categorical:
|
79
|
+
case Bool:
|
80
|
+
case Int8: case Int16: case Int32: case Int64:
|
81
|
+
case Uint8: case Uint16: case Uint32: case Uint64:
|
82
|
+
case Float16: case Float32: case Float64:
|
83
|
+
case Complex32: case Complex64: case Complex128:
|
84
|
+
case FixedString: case FixedBytes:
|
85
|
+
case String: case Bytes:
|
86
|
+
return false;
|
87
|
+
default:
|
88
|
+
return true;
|
89
|
+
}
|
90
|
+
}
|
91
|
+
|
92
|
+
/* Create and initialize memory with type 't'.  Returns a pointer to the
   newly allocated data region (ownership passes to the caller, freed with
   ndt_aligned_free), or NULL with an error set on 'ctx'. */
static char *
xnd_new(const ndt_t * const t, const uint32_t flags, ndt_context_t *ctx)
{
    xnd_t x;

    if (ndt_is_abstract(t)) {
        ndt_err_format(ctx, NDT_ValueError,
                       "cannot create xnd container from abstract type");
        return NULL;
    }

    /* Temporary view used only to drive the recursive initializer. */
    x.index = 0;
    x.type = t;

    /* Aligned zero-fill; primitive dtypes need no further work after this.
       NOTE(review): behavior when t->datasize == 0 depends on the allocator
       returning non-NULL — confirm against ndt_aligned_calloc. */
    x.ptr = ndt_aligned_calloc(t->align, t->datasize);
    if (x.ptr == NULL) {
        ndt_memory_error(ctx);
        return NULL;
    }

    /* Only walk the value tree for dtypes with embedded pointers/subtrees. */
    if (requires_init(t) && xnd_init(&x, flags, ctx) < 0) {
        ndt_aligned_free(x.ptr);
        return NULL;
    }

    return x.ptr;
}
|
120
|
+
|
121
|
+
/*
 * Initialize typed memory. If the XND_OWN_POINTERS flag is set, allocate
 * memory for all ref subtypes and initialize that memory. Otherwise, set
 * refs to NULL.
 *
 * Ref subtypes include any type of the form "Ref(t)".
 *
 * Never allocated are (sizes are not known):
 *   - "string" type (pointer to NUL-terminated UTF8 string)
 *   - data of the "bytes" type: {size: size_t, data: uint8_t *bytes}
 *
 * At all times the data pointers must be NULL or pointers to valid memory.
 *
 * Returns 0 on success, -1 with an error set on 'ctx' on failure.
 */
static int
xnd_init(xnd_t * const x, const uint32_t flags, ndt_context_t *ctx)
{
    const ndt_t * const t = x->type;

    if (ndt_is_abstract(t)) {
        ndt_err_format(ctx, NDT_ValueError,
                       "cannot initialize concrete memory from abstract type");
        return -1;
    }

    switch (t->tag) {
    case FixedDim: {
        int64_t i;

        /* Recurse into every element of the fixed dimension. */
        for (i = 0; i < t->FixedDim.shape; i++) {
            xnd_t next = _fixed_dim_next(x, i);
            if (xnd_init(&next, flags, ctx) < 0) {
                return -1;
            }
        }

        return 0;
    }

    case VarDim: {
        int64_t start, step, shape;
        int64_t i;

        /* Resolve this element's slice of the var-dim offset arrays. */
        shape = ndt_var_indices(&start, &step, t, x->index, ctx);
        if (shape < 0) {
            return -1;
        }

        for (i = 0; i < shape; i++) {
            xnd_t next = _var_dim_next(x, start, step, i);
            if (xnd_init(&next, flags, ctx) < 0) {
                return -1;
            }
        }

        return 0;
    }

    case Tuple: {
        /* On a failed field, clear that field before unwinding. */
        for (int64_t i = 0; i < t->Tuple.shape; i++) {
            xnd_t next = _tuple_next(x, i);
            if (xnd_init(&next, flags, ctx) < 0) {
                xnd_clear(&next, flags);
                return -1;
            }
        }

        return 0;
    }

    case Record: {
        for (int64_t i = 0; i < t->Record.shape; i++) {
            xnd_t next = _record_next(x, i);
            if (xnd_init(&next, flags, ctx) < 0) {
                xnd_clear(&next, flags);
                return -1;
            }
        }

        return 0;
    }

    /*
     * Ref represents a pointer to an explicit type. If XND_OWN_POINTERS
     * is set, allocate memory for that type and set the pointer.
     */
    case Ref: {
        if (flags & XND_OWN_POINTERS) {
            const ndt_t *u = t->Ref.type;
            void *ref;

            ref = ndt_aligned_calloc(u->align, u->datasize);
            if (ref == NULL) {
                ndt_err_format(ctx, NDT_MemoryError, "out of memory");
                return -1;
            }
            /* Store the new target; without XND_OWN_POINTERS the pointer
               is left as the calloc'd NULL. */
            XND_POINTER_DATA(x->ptr) = ref;

            xnd_t next = _ref_next(x);
            if (xnd_init(&next, flags, ctx) < 0) {
                xnd_clear(&next, flags);
                return -1;
            }
        }

        return 0;
    }

    /* Constr is a named explicit type. */
    case Constr: {
        xnd_t next = _constr_next(x);
        if (xnd_init(&next, flags, ctx) < 0) {
            xnd_clear(&next, flags);
            return -1;
        }

        return 0;
    }

    /* Nominal is a globally unique typedef. */
    case Nominal: {
        xnd_t next = _nominal_next(x);
        if (xnd_init(&next, flags, ctx) < 0) {
            xnd_clear(&next, flags);
            return -1;
        }

        return 0;
    }

    /* Categorical is already initialized by calloc(). */
    case Categorical:
        return 0;

    case Char:
        ndt_err_format(ctx, NDT_NotImplementedError, "char not implemented");
        return -1;

    /* Primitive types are already initialized by calloc(). */
    case Bool:
    case Int8: case Int16: case Int32: case Int64:
    case Uint8: case Uint16: case Uint32: case Uint64:
    case Float16: case Float32: case Float64:
    case Complex32: case Complex64: case Complex128:
    case FixedString: case FixedBytes:
    case String: case Bytes:
        return 0;

    /* NOT REACHED: intercepted by ndt_is_abstract(). */
    case Module: case Function:
    case AnyKind: case SymbolicDim: case EllipsisDim: case Typevar:
    case ScalarKind: case SignedKind: case UnsignedKind: case FloatKind:
    case ComplexKind: case FixedStringKind: case FixedBytesKind:
        ndt_err_format(ctx, NDT_RuntimeError, "unexpected abstract type");
        return -1;
    }

    /* NOT REACHED: tags should be exhaustive */
    ndt_err_format(ctx, NDT_RuntimeError, "invalid type tag");
    return -1;
}
|
281
|
+
|
282
|
+
/*
|
283
|
+
* Create a type from a string and return a new master buffer for that type.
|
284
|
+
* Any combination of flags that include XND_OWN_TYPE can be passed.
|
285
|
+
*/
|
286
|
+
xnd_master_t *
|
287
|
+
xnd_empty_from_string(const char *s, uint32_t flags, ndt_context_t *ctx)
|
288
|
+
{
|
289
|
+
xnd_bitmap_t b = {.data=NULL, .size=0, .next=NULL};
|
290
|
+
xnd_master_t *x;
|
291
|
+
ndt_t *t;
|
292
|
+
char *ptr;
|
293
|
+
|
294
|
+
if (!(flags & XND_OWN_TYPE)) {
|
295
|
+
ndt_err_format(ctx, NDT_InvalidArgumentError,
|
296
|
+
"xnd_empty_from_string: XND_OWN_TYPE must be set");
|
297
|
+
return NULL;
|
298
|
+
}
|
299
|
+
|
300
|
+
x = ndt_alloc(1, sizeof *x);
|
301
|
+
if (x == NULL) {
|
302
|
+
return ndt_memory_error(ctx);
|
303
|
+
}
|
304
|
+
|
305
|
+
t = ndt_from_string(s, ctx);
|
306
|
+
if (t == NULL) {
|
307
|
+
ndt_free(x);
|
308
|
+
return NULL;
|
309
|
+
}
|
310
|
+
|
311
|
+
if (!ndt_is_concrete(t)) {
|
312
|
+
ndt_err_format(ctx, NDT_ValueError, "type must be concrete");
|
313
|
+
ndt_del(t);
|
314
|
+
ndt_free(x);
|
315
|
+
return NULL;
|
316
|
+
}
|
317
|
+
|
318
|
+
if (xnd_bitmap_init(&b, t,ctx) < 0) {
|
319
|
+
ndt_del(t);
|
320
|
+
ndt_free(x);
|
321
|
+
return NULL;
|
322
|
+
}
|
323
|
+
|
324
|
+
ptr = xnd_new(t, flags, ctx);
|
325
|
+
if (ptr == NULL) {
|
326
|
+
xnd_bitmap_clear(&b);
|
327
|
+
ndt_del(t);
|
328
|
+
ndt_free(x);
|
329
|
+
return NULL;
|
330
|
+
}
|
331
|
+
|
332
|
+
x->flags = flags;
|
333
|
+
x->master.bitmap = b;
|
334
|
+
x->master.index = 0;
|
335
|
+
x->master.type = t;
|
336
|
+
x->master.ptr = ptr;
|
337
|
+
|
338
|
+
return x;
|
339
|
+
}
|
340
|
+
|
341
|
+
/*
|
342
|
+
* Return a new master buffer. Any combination of flags except for XND_OWN_TYPE
|
343
|
+
* can be passed. 't' must be kept valid as long as the master buffer is valid.
|
344
|
+
*/
|
345
|
+
xnd_master_t *
|
346
|
+
xnd_empty_from_type(const ndt_t *t, uint32_t flags, ndt_context_t *ctx)
|
347
|
+
{
|
348
|
+
xnd_bitmap_t b = {.data=NULL, .size=0, .next=NULL};
|
349
|
+
xnd_master_t *x;
|
350
|
+
char *ptr;
|
351
|
+
|
352
|
+
if (flags & XND_OWN_TYPE) {
|
353
|
+
ndt_err_format(ctx, NDT_InvalidArgumentError,
|
354
|
+
"xnd_empty_from_type: XND_OWN_TYPE must not be set");
|
355
|
+
return NULL;
|
356
|
+
}
|
357
|
+
|
358
|
+
if (!ndt_is_concrete(t)) {
|
359
|
+
ndt_err_format(ctx, NDT_ValueError, "type must be concrete");
|
360
|
+
return NULL;
|
361
|
+
}
|
362
|
+
|
363
|
+
x = ndt_alloc(1, sizeof *x);
|
364
|
+
if (x == NULL) {
|
365
|
+
return ndt_memory_error(ctx);
|
366
|
+
}
|
367
|
+
|
368
|
+
if (xnd_bitmap_init(&b, t, ctx) < 0) {
|
369
|
+
ndt_free(x);
|
370
|
+
return NULL;
|
371
|
+
}
|
372
|
+
|
373
|
+
ptr = xnd_new(t, flags, ctx);
|
374
|
+
if (ptr == NULL) {
|
375
|
+
xnd_bitmap_clear(&b);
|
376
|
+
ndt_free(x);
|
377
|
+
return NULL;
|
378
|
+
}
|
379
|
+
|
380
|
+
x->flags = flags;
|
381
|
+
x->master.bitmap = b;
|
382
|
+
x->master.index = 0;
|
383
|
+
x->master.type = t;
|
384
|
+
x->master.ptr = ptr;
|
385
|
+
|
386
|
+
return x;
|
387
|
+
}
|
388
|
+
|
389
|
+
/*
|
390
|
+
* Create master buffer from an existing xnd_t. Ownership of bitmaps, type,
|
391
|
+
* ptr is transferred to the master buffer.
|
392
|
+
*
|
393
|
+
* 'flags' are the master buffer's flags after the transfer. The flags of
|
394
|
+
* 'src' are always assumed to be XND_OWN_ALL.
|
395
|
+
*
|
396
|
+
* This is a convenience function that should only be used if the xnd_t src
|
397
|
+
* owns everything and its internals have not been exposed to other views.
|
398
|
+
*/
|
399
|
+
xnd_master_t *
|
400
|
+
xnd_from_xnd(xnd_t *src, uint32_t flags, ndt_context_t *ctx)
|
401
|
+
{
|
402
|
+
xnd_master_t *x;
|
403
|
+
|
404
|
+
x = ndt_alloc(1, sizeof *x);
|
405
|
+
if (x == NULL) {
|
406
|
+
xnd_clear(src, XND_OWN_ALL);
|
407
|
+
ndt_del((ndt_t *)src->type);
|
408
|
+
ndt_aligned_free(src->ptr);
|
409
|
+
xnd_bitmap_clear(&src->bitmap);
|
410
|
+
return ndt_memory_error(ctx);
|
411
|
+
}
|
412
|
+
|
413
|
+
x->flags = flags;
|
414
|
+
x->master = *src;
|
415
|
+
|
416
|
+
return x;
|
417
|
+
}
|
418
|
+
|
419
|
+
|
420
|
+
/*****************************************************************************/
|
421
|
+
/* Deallocate and clear a master buffer */
|
422
|
+
/*****************************************************************************/
|
423
|
+
|
424
|
+
static bool
|
425
|
+
requires_clear(const ndt_t * const t)
|
426
|
+
{
|
427
|
+
const ndt_t *dtype = ndt_dtype(t);
|
428
|
+
|
429
|
+
switch (dtype->tag) {
|
430
|
+
case Categorical:
|
431
|
+
case Bool:
|
432
|
+
case Int8: case Int16: case Int32: case Int64:
|
433
|
+
case Uint8: case Uint16: case Uint32: case Uint64:
|
434
|
+
case Float16: case Float32: case Float64:
|
435
|
+
case Complex32: case Complex64: case Complex128:
|
436
|
+
case FixedString: case FixedBytes:
|
437
|
+
return false;
|
438
|
+
default:
|
439
|
+
return true;
|
440
|
+
}
|
441
|
+
}
|
442
|
+
|
443
|
+
/* Clear an embedded pointer. */
|
444
|
+
static void
|
445
|
+
xnd_clear_ref(xnd_t *x, const uint32_t flags)
|
446
|
+
{
|
447
|
+
assert(x->type->tag == Ref);
|
448
|
+
|
449
|
+
if (flags & XND_OWN_POINTERS) {
|
450
|
+
ndt_aligned_free(XND_POINTER_DATA(x->ptr));
|
451
|
+
XND_POINTER_DATA(x->ptr) = NULL;
|
452
|
+
}
|
453
|
+
}
|
454
|
+
|
455
|
+
/* Strings must always be allocated by non-aligned allocators. */
|
456
|
+
static void
|
457
|
+
xnd_clear_string(xnd_t *x, const uint32_t flags)
|
458
|
+
{
|
459
|
+
assert(x->type->tag == String);
|
460
|
+
|
461
|
+
if (flags & XND_OWN_STRINGS) {
|
462
|
+
ndt_free(XND_POINTER_DATA(x->ptr));
|
463
|
+
XND_POINTER_DATA(x->ptr) = NULL;
|
464
|
+
}
|
465
|
+
}
|
466
|
+
|
467
|
+
/* Bytes must always be allocated by aligned allocators. */
|
468
|
+
static void
|
469
|
+
xnd_clear_bytes(xnd_t *x, const uint32_t flags)
|
470
|
+
{
|
471
|
+
assert(x->type->tag == Bytes);
|
472
|
+
|
473
|
+
if (flags & XND_OWN_BYTES) {
|
474
|
+
ndt_aligned_free(XND_BYTES_DATA(x->ptr));
|
475
|
+
XND_BYTES_DATA(x->ptr) = NULL;
|
476
|
+
}
|
477
|
+
}
|
478
|
+
|
479
|
+
/* Clear embedded pointers in the data according to flags. */
|
480
|
+
/*
 * Recursively clear embedded pointers in the data according to 'flags'.
 * Walks the concrete type tree, descending into dimensions, tuples,
 * records and constructors, and frees refs/strings/bytes at the leaves
 * via xnd_clear_ref/xnd_clear_string/xnd_clear_bytes.  The data buffer
 * itself is NOT freed here (see xnd_del_buffer).
 */
static void
xnd_clear(xnd_t * const x, const uint32_t flags)
{
    NDT_STATIC_CONTEXT(ctx);
    const ndt_t * const t = x->type;

    assert(ndt_is_concrete(t));

    switch (t->tag) {
    case FixedDim: {
        /* Clear every element of the fixed dimension. */
        for (int64_t i = 0; i < t->FixedDim.shape; i++) {
            xnd_t next = _fixed_dim_next(x, i);
            xnd_clear(&next, flags);
        }

        return;
    }

    case VarDim: {
        int64_t start, step, shape;
        int64_t i;

        shape = ndt_var_indices(&start, &step, t, x->index, &ctx);
        if (shape < 0) {
            /* This cannot happen: indices are checked in xnd_init() and
             * should remain constant.  Since xnd_clear() returns void,
             * report the inconsistency on stderr and give up. */
            ndt_context_del(&ctx);
            fprintf(stderr, "xnd_clear: internal error: var indices changed\n");
            return;
        }

        for (i = 0; i < shape; i++) {
            xnd_t next = _var_dim_next(x, start, step, i);
            xnd_clear(&next, flags);
        }

        return;
    }

    case Tuple: {
        /* Clear each tuple field. */
        for (int64_t i = 0; i < t->Tuple.shape; i++) {
            xnd_t next = _tuple_next(x, i);
            xnd_clear(&next, flags);
        }

        return;
    }

    case Record: {
        /* Clear each record field. */
        for (int64_t i = 0; i < t->Record.shape; i++) {
            xnd_t next = _record_next(x, i);
            xnd_clear(&next, flags);
        }

        return;
    }

    case Ref: {
        /* Clear the pointed-to value first, then release the pointer
           itself.  Skip entirely if the buffer does not own pointers. */
        if (flags & XND_OWN_POINTERS) {
            xnd_t next = _ref_next(x);
            xnd_clear(&next, flags);
            xnd_clear_ref(x, flags);
        }

        return;
    }

    case Constr: {
        /* Constructors wrap a single value: descend. */
        xnd_t next = _constr_next(x);
        xnd_clear(&next, flags);
        return;
    }

    case Nominal: {
        /* Nominal types wrap a single value: descend. */
        xnd_t next = _nominal_next(x);
        xnd_clear(&next, flags);
        return;
    }

    case Bool:
    case Int8: case Int16: case Int32: case Int64:
    case Uint8: case Uint16: case Uint32: case Uint64:
    case Float16: case Float32: case Float64:
    case Complex32: case Complex64: case Complex128:
    case FixedString: case FixedBytes:
        /* Inline value types: nothing to free. */
        return;

    case String:
        xnd_clear_string(x, flags);
        return;

    case Bytes:
        xnd_clear_bytes(x, flags);
        return;

    case Categorical:
        /* Categorical values are just indices into the categories. */
        return;

    case Char:
        /* Just a scalar. */
        return;

    /* NOT REACHED: intercepted by ndt_is_abstract(). */
    case Module: case Function:
    case AnyKind: case SymbolicDim: case EllipsisDim: case Typevar:
    case ScalarKind: case SignedKind: case UnsignedKind: case FloatKind:
    case ComplexKind: case FixedStringKind: case FixedBytesKind:
        return;
    }
}
|
591
|
+
|
592
|
+
/*
|
593
|
+
* Delete an xnd_t buffer according to 'flags'. Outside xnd_del(), this
|
594
|
+
* function should only be used if an xnd_t owns all its members.
|
595
|
+
*/
|
596
|
+
/*
 * Delete an xnd_t buffer according to 'flags'. Outside xnd_del(), this
 * function should only be used if an xnd_t owns all its members.
 *
 * Order matters: embedded pointers must be cleared (which needs the type
 * to traverse the data) BEFORE the type and the data buffer are freed.
 */
void
xnd_del_buffer(xnd_t *x, uint32_t flags)
{
    if (x != NULL) {
        if (x->ptr != NULL && x->type != NULL) {
            /* Only traverse the data if it can contain embedded pointers. */
            if ((flags&XND_OWN_DATA) && requires_clear(x->type)) {
                xnd_clear(x, flags);
            }

            if (flags & XND_OWN_TYPE) {
                /* Cast away const: the buffer owns this type. */
                ndt_del((ndt_t *)x->type);
            }

            if (flags & XND_OWN_DATA) {
                ndt_aligned_free(x->ptr);
            }
        }

        /* The validity bitmap is tied to the data's lifetime. */
        if (flags & XND_OWN_DATA) {
            xnd_bitmap_clear(&x->bitmap);
        }
    }
}
|
619
|
+
|
620
|
+
/*
|
621
|
+
* Delete the master buffer. The type and embedded pointers are deallocated
|
622
|
+
* according to x->flags.
|
623
|
+
*/
|
624
|
+
void
|
625
|
+
xnd_del(xnd_master_t *x)
|
626
|
+
{
|
627
|
+
if (x != NULL) {
|
628
|
+
xnd_del_buffer(&x->master, x->flags);
|
629
|
+
ndt_free(x);
|
630
|
+
}
|
631
|
+
}
|
632
|
+
|
633
|
+
|
634
|
+
/*****************************************************************************/
|
635
|
+
/* Subtrees (single elements are a special case) */
|
636
|
+
/*****************************************************************************/
|
637
|
+
|
638
|
+
static int64_t
|
639
|
+
get_index(const xnd_index_t *key, int64_t shape, ndt_context_t *ctx)
|
640
|
+
{
|
641
|
+
switch (key->tag) {
|
642
|
+
case Index: {
|
643
|
+
int64_t i = key->Index;
|
644
|
+
if (i < 0) {
|
645
|
+
i += shape;
|
646
|
+
}
|
647
|
+
|
648
|
+
if (i < 0 || i >= shape || i > XND_SSIZE_MAX) {
|
649
|
+
ndt_err_format(ctx, NDT_IndexError,
|
650
|
+
"index with value %" PRIi64 " out of bounds", key->Index);
|
651
|
+
return -1;
|
652
|
+
}
|
653
|
+
|
654
|
+
return i;
|
655
|
+
}
|
656
|
+
|
657
|
+
case FieldName:
|
658
|
+
ndt_err_format(ctx, NDT_ValueError,
|
659
|
+
"expected integer index, got field name: '%s'", key->FieldName);
|
660
|
+
return -1;
|
661
|
+
|
662
|
+
case Slice:
|
663
|
+
ndt_err_format(ctx, NDT_ValueError,
|
664
|
+
"expected integer index, got slice");
|
665
|
+
return -1;
|
666
|
+
}
|
667
|
+
|
668
|
+
/* NOT REACHED: tags should be exhaustive */
|
669
|
+
ndt_err_format(ctx, NDT_RuntimeError, "invalid index tag");
|
670
|
+
return -1;
|
671
|
+
}
|
672
|
+
|
673
|
+
static int64_t
|
674
|
+
get_index_record(const ndt_t *t, const xnd_index_t *key, ndt_context_t *ctx)
|
675
|
+
{
|
676
|
+
assert(t->tag == Record);
|
677
|
+
|
678
|
+
switch (key->tag) {
|
679
|
+
case FieldName: {
|
680
|
+
int64_t i;
|
681
|
+
|
682
|
+
for (i = 0; i < t->Record.shape; i++) {
|
683
|
+
if (strcmp(key->FieldName, t->Record.names[i]) == 0) {
|
684
|
+
return i;
|
685
|
+
}
|
686
|
+
}
|
687
|
+
|
688
|
+
ndt_err_format(ctx, NDT_ValueError,
|
689
|
+
"invalid field name '%s'", key->FieldName);
|
690
|
+
return -1;
|
691
|
+
}
|
692
|
+
case Index: case Slice:
|
693
|
+
return get_index(key, t->Record.shape, ctx);
|
694
|
+
}
|
695
|
+
|
696
|
+
/* NOT REACHED: tags should be exhaustive */
|
697
|
+
ndt_err_format(ctx, NDT_RuntimeError, "invalid index tag");
|
698
|
+
return -1;
|
699
|
+
}
|
700
|
+
|
701
|
+
static void
|
702
|
+
set_index_exception(bool indexable, ndt_context_t *ctx)
|
703
|
+
{
|
704
|
+
if (indexable) {
|
705
|
+
ndt_err_format(ctx, NDT_IndexError, "too many indices");
|
706
|
+
}
|
707
|
+
else {
|
708
|
+
ndt_err_format(ctx, NDT_TypeError, "type not indexable");
|
709
|
+
}
|
710
|
+
}
|
711
|
+
|
712
|
+
/* Return a typed subtree of a memory block */
|
713
|
+
/*
 * Return a typed subtree of a memory block, selected by a sequence of
 * 'len' plain integer indices.  The returned view shares the caller's
 * type and data (zero copy).  Transparent wrappers (Ref, Constr,
 * Nominal) are traversed without consuming an index.  On error, returns
 * xnd_error with the reason in 'ctx'.
 */
xnd_t
xnd_subtree_index(const xnd_t *x, const int64_t *indices, int len, ndt_context_t *ctx)
{
    const ndt_t * const t = x->type;

    assert(ndt_is_concrete(t));

    if (t->ndim > 0 && ndt_is_optional(t)) {
        ndt_err_format(ctx, NDT_NotImplementedError,
            "optional dimensions are not supported");
        return xnd_error;
    }

    /* All indices consumed: the current view is the result. */
    if (len == 0) {
        return *x;
    }

    const int64_t i = indices[0];

    switch (t->tag) {
    case FixedDim: {
        if (i < 0 || i >= t->FixedDim.shape) {
            ndt_err_format(ctx, NDT_ValueError,
                "fixed dim index out of bounds");
            return xnd_error;
        }

        const xnd_t next = xnd_fixed_dim_next(x, i);
        return xnd_subtree_index(&next, indices+1, len-1, ctx);
    }

    case VarDim: {
        int64_t start, step, shape;

        /* Resolve this view's slice of the var-dim offsets. */
        shape = ndt_var_indices(&start, &step, t, x->index, ctx);
        if (shape < 0) {
            return xnd_error;
        }

        if (i < 0 || i >= shape) {
            ndt_err_format(ctx, NDT_ValueError, "var dim index out of bounds");
            return xnd_error;
        }

        const xnd_t next = xnd_var_dim_next(x, start, step, i);
        return xnd_subtree_index(&next, indices+1, len-1, ctx);
    }

    case Tuple: {
        if (i < 0 || i >= t->Tuple.shape) {
            ndt_err_format(ctx, NDT_ValueError, "tuple index out of bounds");
            return xnd_error;
        }

        const xnd_t next = xnd_tuple_next(x, i, ctx);
        if (next.ptr == NULL) {
            return xnd_error;
        }

        return xnd_subtree_index(&next, indices+1, len-1, ctx);
    }

    case Record: {
        if (i < 0 || i >= t->Record.shape) {
            ndt_err_format(ctx, NDT_ValueError, "record index out of bounds");
            return xnd_error;
        }

        const xnd_t next = xnd_record_next(x, i, ctx);
        if (next.ptr == NULL) {
            return xnd_error;
        }

        return xnd_subtree_index(&next, indices+1, len-1, ctx);
    }

    /* Transparent wrappers: descend without consuming an index. */
    case Ref: {
        const xnd_t next = xnd_ref_next(x, ctx);
        if (next.ptr == NULL) {
            return xnd_error;
        }

        return xnd_subtree_index(&next, indices, len, ctx);
    }

    case Constr: {
        const xnd_t next = xnd_constr_next(x, ctx);
        if (next.ptr == NULL) {
            return xnd_error;
        }

        return xnd_subtree_index(&next, indices, len, ctx);
    }

    case Nominal: {
        const xnd_t next = xnd_nominal_next(x, ctx);
        if (next.ptr == NULL) {
            return xnd_error;
        }

        return xnd_subtree_index(&next, indices, len, ctx);
    }

    default:
        ndt_err_format(ctx, NDT_ValueError, "type not indexable");
        return xnd_error;
    }
}
|
821
|
+
|
822
|
+
/*
|
823
|
+
* Return a zero copy view of an xnd object. If a dtype is indexable,
|
824
|
+
* descend into the dtype.
|
825
|
+
*/
|
826
|
+
/*
 * Return a zero copy view of an xnd object. If a dtype is indexable,
 * descend into the dtype.
 *
 * 'indexable' tracks whether the previous step consumed an index (true
 * after a dimension/tuple/record step, false after a transparent wrapper)
 * and only affects which error is raised when indexing fails.
 */
static xnd_t
_xnd_subtree(const xnd_t *x, const xnd_index_t indices[], int len, bool indexable,
             ndt_context_t *ctx)
{
    const ndt_t *t = x->type;
    const xnd_index_t *key;

    assert(ndt_is_concrete(t));

    if (t->ndim > 0 && ndt_is_optional(t)) {
        ndt_err_format(ctx, NDT_NotImplementedError,
            "optional dimensions are not supported");
        return xnd_error;
    }

    /* All keys consumed: the current view is the result. */
    if (len == 0) {
        return *x;
    }

    key = &indices[0];

    switch (t->tag) {
    case FixedDim: {
        int64_t i = get_index(key, t->FixedDim.shape, ctx);
        if (i < 0) {
            return xnd_error;
        }

        const xnd_t next = xnd_fixed_dim_next(x, i);
        return _xnd_subtree(&next, indices+1, len-1, true, ctx);
    }

    case VarDim: {
        int64_t start, step, shape;
        int64_t i;

        /* Resolve this view's slice of the var-dim offsets. */
        shape = ndt_var_indices(&start, &step, t, x->index, ctx);
        if (shape < 0) {
            return xnd_error;
        }

        i = get_index(key, shape, ctx);
        if (i < 0) {
            return xnd_error;
        }

        const xnd_t next = xnd_var_dim_next(x, start, step, i);
        return _xnd_subtree(&next, indices+1, len-1, true, ctx);
    }

    case Tuple: {
        const int64_t i = get_index(key, t->Tuple.shape, ctx);
        if (i < 0) {
            return xnd_error;
        }

        const xnd_t next = xnd_tuple_next(x, i, ctx);
        if (next.ptr == NULL) {
            return xnd_error;
        }

        return _xnd_subtree(&next, indices+1, len-1, true, ctx);
    }

    case Record: {
        /* Records accept field names as well as integer indices. */
        int64_t i = get_index_record(t, key, ctx);
        if (i < 0) {
            return xnd_error;
        }

        const xnd_t next = xnd_record_next(x, i, ctx);
        if (next.ptr == NULL) {
            return xnd_error;
        }

        return _xnd_subtree(&next, indices+1, len-1, true, ctx);
    }

    /* Transparent wrappers: descend without consuming a key. */
    case Ref: {
        const xnd_t next = xnd_ref_next(x, ctx);
        if (next.ptr == NULL) {
            return xnd_error;
        }

        return _xnd_subtree(&next, indices, len, false, ctx);
    }

    case Constr: {
        const xnd_t next = xnd_constr_next(x, ctx);
        if (next.ptr == NULL) {
            return xnd_error;
        }

        return _xnd_subtree(&next, indices, len, false, ctx);
    }

    case Nominal: {
        const xnd_t next = xnd_nominal_next(x, ctx);
        if (next.ptr == NULL) {
            return xnd_error;
        }

        return _xnd_subtree(&next, indices, len, false, ctx);
    }

    default:
        set_index_exception(indexable, ctx);
        return xnd_error;
    }
}
|
936
|
+
|
937
|
+
/*
|
938
|
+
* Return a zero copy view of an xnd object. If a dtype is indexable,
|
939
|
+
* descend into the dtype.
|
940
|
+
*/
|
941
|
+
/*
 * Return a zero copy view of an xnd object. If a dtype is indexable,
 * descend into the dtype.
 *
 * Thin public wrapper around _xnd_subtree(); 'indexable' starts out
 * false so a non-indexable top-level type raises TypeError.
 */
xnd_t
xnd_subtree(const xnd_t *x, const xnd_index_t indices[], int len, ndt_context_t *ctx)
{
    return _xnd_subtree(x, indices, len, false, ctx);
}
|
946
|
+
|
947
|
+
static xnd_t xnd_index(const xnd_t *x, const xnd_index_t indices[], int len, ndt_context_t *ctx);
|
948
|
+
static xnd_t xnd_slice(const xnd_t *x, const xnd_index_t indices[], int len, ndt_context_t *ctx);
|
949
|
+
|
950
|
+
/*
 * Apply a mixed sequence of index and slice keys to a view.  Dispatches
 * on the first key's tag to xnd_index() or xnd_slice(), which recurse
 * back into this function for the remaining keys.  Unlike xnd_subtree(),
 * the result carries a COPY of the type (caller owns it).
 */
xnd_t
xnd_multikey(const xnd_t *x, const xnd_index_t indices[], int len, ndt_context_t *ctx)
{
    const ndt_t *t = x->type;
    const xnd_index_t *key;

    assert(len >= 0);
    assert(ndt_is_concrete(t));
    assert(x->ptr != NULL);

    if (len > t->ndim) {
        ndt_err_format(ctx, NDT_IndexError, "too many indices");
        return xnd_error;
    }

    /* Base case: no keys left, return the view with its own type copy. */
    if (len == 0) {
        xnd_t next = *x;
        next.type = ndt_copy(t, ctx);
        if (next.type == NULL) {
            return xnd_error;
        }

        return next;
    }

    key = &indices[0];

    switch (key->tag) {
    case Index:
        return xnd_index(x, indices, len, ctx);
    case Slice:
        return xnd_slice(x, indices, len, ctx);
    case FieldName:
        /* Field names are resolved earlier (xnd_subtree path). */
        ndt_err_format(ctx, NDT_RuntimeError,
            "xnd_multikey: internal error: key must be index or slice");
        return xnd_error;
    }

    /* NOT REACHED: tags should be exhaustive */
    ndt_err_format(ctx, NDT_RuntimeError, "invalid index tag");
    return xnd_error;
}
|
992
|
+
|
993
|
+
/*
|
994
|
+
* Return a view with a copy of the type. Indexing into the dtype is
|
995
|
+
* not permitted.
|
996
|
+
*/
|
997
|
+
/*
 * Handle a single integer key within a mixed index/slice subscript.
 * Returns a view with a copy of the type. Indexing into the dtype is
 * not permitted here; only fixed dimensions can be indexed.
 */
static xnd_t
xnd_index(const xnd_t *x, const xnd_index_t indices[], int len, ndt_context_t *ctx)
{
    const ndt_t *t = x->type;
    const xnd_index_t *key;

    assert(len > 0);
    assert(ndt_is_concrete(t));
    assert(x->ptr != NULL);

    key = &indices[0];
    assert(key->tag == Index);

    switch (t->tag) {
    case FixedDim: {
        const int64_t i = get_index(key, t->FixedDim.shape, ctx);
        if (i < 0) {
            return xnd_error;
        }

        /* Step into the element, then apply the remaining keys. */
        const xnd_t next = xnd_fixed_dim_next(x, i);
        return xnd_multikey(&next, indices+1, len-1, ctx);
    }

    case VarDim: {
        /* Var dims require pure indexing (xnd_subtree) or pure slicing. */
        ndt_err_format(ctx, NDT_IndexError,
            "mixed indexing and slicing is not supported for var dimensions");
        return xnd_error;
    }

    default:
        ndt_err_format(ctx, NDT_IndexError, "type is not indexable");
        return xnd_error;
    }
}
|
1032
|
+
|
1033
|
+
/*
 * Handle a single slice key within a mixed index/slice subscript.
 * Builds and returns a view whose (newly constructed) type describes the
 * sliced dimension; the data is shared (zero copy). For fixed dims the
 * slice becomes a new shape/step in the type; for var dims the slice is
 * appended to the type's slice stack (ndt_var_add_slice).
 */
static xnd_t
xnd_slice(const xnd_t *x, const xnd_index_t indices[], int len, ndt_context_t *ctx)
{
    const ndt_t *t = x->type;
    const xnd_index_t *key;

    assert(len > 0);
    assert(ndt_is_concrete(t));
    assert(x->ptr != NULL);

    key = &indices[0];
    assert(key->tag == Slice);

    switch (t->tag) {
    case FixedDim: {
        int64_t start = key->Slice.start;
        int64_t stop = key->Slice.stop;
        int64_t step = key->Slice.step;
        int64_t shape;

        /* Normalize start/stop and compute the sliced shape. */
        shape = xnd_slice_adjust_indices(t->FixedDim.shape, &start, &stop, step);

        /* Recurse on the first selected element to slice inner dims. */
        const xnd_t next = xnd_fixed_dim_next(x, start);
        const xnd_t sliced = xnd_multikey(&next, indices+1, len-1, ctx);
        if (sliced.ptr == NULL) {
            return xnd_error;
        }

        /* New fixed dim type: sliced shape, stride scaled by step. */
        xnd_t ret = *x;
        ret.type = ndt_fixed_dim((ndt_t *)sliced.type, shape,
                                 t->Concrete.FixedDim.step * step,
                                 ctx);
        if (ret.type == NULL) {
            return xnd_error;
        }
        ret.index = sliced.index;

        return ret;
    }

    case VarDim: {
        int64_t start = key->Slice.start;
        int64_t stop = key->Slice.stop;
        int64_t step = key->Slice.step;
        ndt_slice_t *slices;
        int32_t nslices;

        if (ndt_is_optional(t)) {
            ndt_err_format(ctx, NDT_NotImplementedError,
                "optional dimensions are temporarily disabled");
            return xnd_error;
        }

        /* Slice the element type first (remaining keys). */
        xnd_t next = *x;
        next.type = t->VarDim.type;

        next = xnd_multikey(&next, indices+1, len-1, ctx);
        if (next.ptr == NULL) {
            return xnd_error;
        }

        /* Record this slice on the var dim's slice stack. */
        slices = ndt_var_add_slice(&nslices, t, start, stop, step, ctx);
        if (slices == NULL) {
            return xnd_error;
        }

        /* Rebuild the var dim type, reusing the existing offsets. */
        xnd_t ret = *x;
        ret.type = ndt_var_dim((ndt_t *)next.type,
                               ExternalOffsets,
                               t->Concrete.VarDim.noffsets, t->Concrete.VarDim.offsets,
                               nslices, slices,
                               ctx);
        if (ret.type == NULL) {
            return xnd_error;
        }

        ret.index = next.index;

        return ret;
    }

    case Tuple: {
        ndt_err_format(ctx, NDT_NotImplementedError,
            "slicing tuples is not supported");
        return xnd_error;
    }

    case Record: {
        ndt_err_format(ctx, NDT_NotImplementedError,
            "slicing records is not supported");
        return xnd_error;
    }

    default:
        ndt_err_format(ctx, NDT_IndexError, "type not sliceable");
        return xnd_error;
    }
}
|
1131
|
+
|
1132
|
+
xnd_t
|
1133
|
+
xnd_subscript(const xnd_t *x, const xnd_index_t indices[], int len,
|
1134
|
+
ndt_context_t *ctx)
|
1135
|
+
{
|
1136
|
+
bool have_slice = false;
|
1137
|
+
|
1138
|
+
for (int i = 0; i < len; i++) {
|
1139
|
+
if (indices[i].tag == Slice) {
|
1140
|
+
have_slice = true;
|
1141
|
+
break;
|
1142
|
+
}
|
1143
|
+
}
|
1144
|
+
|
1145
|
+
if (have_slice) {
|
1146
|
+
return xnd_multikey(x, indices, len, ctx);
|
1147
|
+
}
|
1148
|
+
else {
|
1149
|
+
xnd_t res = xnd_subtree(x, indices, len, ctx);
|
1150
|
+
const ndt_t *t;
|
1151
|
+
|
1152
|
+
if (res.ptr == NULL) {
|
1153
|
+
return xnd_error;
|
1154
|
+
}
|
1155
|
+
|
1156
|
+
t = ndt_copy(res.type, ctx);
|
1157
|
+
if (t == NULL) {
|
1158
|
+
return xnd_error;
|
1159
|
+
}
|
1160
|
+
|
1161
|
+
res.type = t;
|
1162
|
+
return res;
|
1163
|
+
}
|
1164
|
+
}
|
1165
|
+
|
1166
|
+
|
1167
|
+
/*****************************************************************************/
|
1168
|
+
/* Unstable API */
|
1169
|
+
/*****************************************************************************/
|
1170
|
+
|
1171
|
+
/* error return value */
|
1172
|
+
/* Canonical error return value for xnd_view_t functions: everything is
   zeroed/NULL.  Detected by callers via xnd_view_err_occurred(), which
   tests view.ptr == NULL. */
const xnd_view_t xnd_view_error = {
  .flags = 0,
  .obj = NULL,
  .view = { .bitmap = {.data=NULL, .size=0, .next=NULL},
            .index = 0,
            .type = NULL,
            .ptr = NULL }
};
|
1180
|
+
|
1181
|
+
int
|
1182
|
+
xnd_view_err_occurred(const xnd_view_t *x)
|
1183
|
+
{
|
1184
|
+
return x->view.ptr == NULL;
|
1185
|
+
}
|
1186
|
+
|
1187
|
+
void
|
1188
|
+
xnd_view_clear(xnd_view_t *x)
|
1189
|
+
{
|
1190
|
+
xnd_del_buffer(&x->view, x->flags);
|
1191
|
+
x->flags = 0;
|
1192
|
+
x->obj = NULL;
|
1193
|
+
}
|
1194
|
+
|
1195
|
+
xnd_view_t
|
1196
|
+
xnd_view_from_xnd(const void *obj, const xnd_t *x)
|
1197
|
+
{
|
1198
|
+
xnd_view_t res;
|
1199
|
+
|
1200
|
+
res.flags = 0;
|
1201
|
+
res.obj = obj;
|
1202
|
+
res.view = *x;
|
1203
|
+
|
1204
|
+
return res;
|
1205
|
+
}
|
1206
|
+
|
1207
|
+
xnd_view_t
|
1208
|
+
xnd_view_subscript(const xnd_view_t *x, const xnd_index_t indices[], int len,
|
1209
|
+
ndt_context_t *ctx)
|
1210
|
+
{
|
1211
|
+
xnd_view_t res;
|
1212
|
+
|
1213
|
+
res.flags = XND_OWN_TYPE;
|
1214
|
+
res.obj = x->obj;
|
1215
|
+
|
1216
|
+
res.view = xnd_subscript(&x->view, indices, len, ctx);
|
1217
|
+
if (xnd_err_occurred(&res.view)) {
|
1218
|
+
return xnd_view_error;
|
1219
|
+
}
|
1220
|
+
|
1221
|
+
return res;
|
1222
|
+
}
|
1223
|
+
|
1224
|
+
|
1225
|
+
/*****************************************************************************/
|
1226
|
+
/* Float format */
|
1227
|
+
/*****************************************************************************/
|
1228
|
+
|
1229
|
+
/* Byte-order tags for IEEE 754 floats/doubles on this platform. */
#define IEEE_LITTLE_ENDIAN 0
#define IEEE_BIG_ENDIAN 1
/* Detected at runtime by xnd_init_float(); queried via the
   xnd_float_is_*/xnd_double_is_* predicates below. */
static int xnd_double_format = 0;
static int xnd_float_format = 0;
|
1233
|
+
|
1234
|
+
/*
 * Detect the platform's IEEE 754 byte order for float and double by
 * probing the in-memory representation of known constants.  Must be
 * called before the endianness predicates are used.  Returns 0 on
 * success, -1 (with an error in 'ctx') on unsupported platforms.
 */
int
xnd_init_float(ndt_context_t *ctx)
{
    /* 9006104071832581.0 has the IEEE 754 binary64 representation
       43 3f ff 01 02 03 04 05 (big-endian byte order). */
    double x = 9006104071832581.0;
    /* 16711938.0f has the IEEE 754 binary32 representation
       4b 7f 01 02 (big-endian byte order). */
    float y = 16711938.0;

#ifndef _MSC_VER /* Suppress a warning, no need to check on Windows. */
    if (sizeof(double) != 8) {
        ndt_err_format(ctx, NDT_RuntimeError,
            "unsupported platform, need sizeof(double)==8");
        return -1;
    }

    if (sizeof(float) != 4) {
        ndt_err_format(ctx, NDT_RuntimeError,
            "unsupported platform, need sizeof(float)==4");
        return -1;
    }
#endif

    /* Compare the probe's bytes against both orderings. */
    if (memcmp(&x, "\x43\x3f\xff\x01\x02\x03\x04\x05", 8) == 0) {
        xnd_double_format = IEEE_BIG_ENDIAN;
    }
    else if (memcmp(&x, "\x05\x04\x03\x02\x01\xff\x3f\x43", 8) == 0) {
        xnd_double_format = IEEE_LITTLE_ENDIAN;
    }
    else {
        /* Mixed-endian or non-IEEE platforms are rejected. */
        ndt_err_format(ctx, NDT_RuntimeError,
            "unsupported platform, could not detect double endianness");
        return -1;
    }

    if (memcmp(&y, "\x4b\x7f\x01\x02", 4) == 0) {
        xnd_float_format = IEEE_BIG_ENDIAN;
    }
    else if (memcmp(&y, "\x02\x01\x7f\x4b", 4) == 0) {
        xnd_float_format = IEEE_LITTLE_ENDIAN;
    }
    else {
        ndt_err_format(ctx, NDT_RuntimeError,
            "unsupported platform, could not detect float endianness");
        return -1;
    }

    return 0;
}
|
1281
|
+
|
1282
|
+
bool
|
1283
|
+
xnd_float_is_little_endian(void)
|
1284
|
+
{
|
1285
|
+
return xnd_float_format==IEEE_LITTLE_ENDIAN;
|
1286
|
+
}
|
1287
|
+
|
1288
|
+
bool
|
1289
|
+
xnd_float_is_big_endian(void)
|
1290
|
+
{
|
1291
|
+
return xnd_float_format==IEEE_BIG_ENDIAN;
|
1292
|
+
}
|
1293
|
+
|
1294
|
+
bool
|
1295
|
+
xnd_double_is_little_endian(void)
|
1296
|
+
{
|
1297
|
+
return xnd_double_format==IEEE_LITTLE_ENDIAN;
|
1298
|
+
}
|
1299
|
+
|
1300
|
+
bool
|
1301
|
+
xnd_double_is_big_endian(void)
|
1302
|
+
{
|
1303
|
+
return xnd_double_format==IEEE_BIG_ENDIAN;
|
1304
|
+
}
|