yinspire 0.1.0
- data/README +24 -0
- data/bench/pq/Makefile +5 -0
- data/bench/pq/bench.cc +321 -0
- data/bench/pq/bench.rb +125 -0
- data/bench/pq/bench_binaryheap.h +46 -0
- data/bench/pq/bench_calendarqueue.h +58 -0
- data/bench/pq/bench_pairingheap.h +61 -0
- data/bench/pq/bench_stlpq.h +46 -0
- data/bench/pq/benchmark.h +225 -0
- data/bench/pq/distribution.h +93 -0
- data/bench/pq/make.rb +24 -0
- data/bin/yinspire +186 -0
- data/examples/nets/gereon2005.c.json +93723 -0
- data/examples/nets/gereon2005.yin +232650 -0
- data/examples/nets/skorpion.graphml +396 -0
- data/examples/nets/spiketrains_angle_180.txt +8 -0
- data/lib/Algorithms/Array.h +52 -0
- data/lib/Algorithms/BinaryHeap.h +265 -0
- data/lib/Algorithms/CalendarQueue.h +257 -0
- data/lib/Algorithms/IndexedBinaryHeap.h +90 -0
- data/lib/Algorithms/PairingHeap.h +169 -0
- data/lib/Allocators/ChunkedFreelistAllocator.h +96 -0
- data/lib/Allocators/MemoryAllocator.h +45 -0
- data/lib/Allocators/RubyMemoryAllocator.h +37 -0
- data/lib/Yinspire.rb +69 -0
- data/lib/Yinspire/All.rb +10 -0
- data/lib/Yinspire/Core/NeuralEntity.rb +133 -0
- data/lib/Yinspire/Core/Neuron.rb +162 -0
- data/lib/Yinspire/Core/Scheduling/NeuralEntity.rb +123 -0
- data/lib/Yinspire/Core/Scheduling/Simulator.rb +94 -0
- data/lib/Yinspire/Core/Simulator.rb +36 -0
- data/lib/Yinspire/Core/StimuliMixin.rb +103 -0
- data/lib/Yinspire/Core/Stimulus.rb +25 -0
- data/lib/Yinspire/Core/Synapse.rb +64 -0
- data/lib/Yinspire/Dumpers/Dumper.rb +19 -0
- data/lib/Yinspire/Dumpers/Dumper_Dot.rb +28 -0
- data/lib/Yinspire/Loaders/GraphML.rb +84 -0
- data/lib/Yinspire/Loaders/Loader.rb +31 -0
- data/lib/Yinspire/Loaders/Loader_GraphML.rb +97 -0
- data/lib/Yinspire/Loaders/Loader_JSON.rb +181 -0
- data/lib/Yinspire/Loaders/Loader_Spike.rb +42 -0
- data/lib/Yinspire/Loaders/Loader_Yin.rb +62 -0
- data/lib/Yinspire/Loaders/YinScanner.rb +247 -0
- data/lib/Yinspire/Models/Neuron_Base.rb +38 -0
- data/lib/Yinspire/Models/Neuron_Input.rb +12 -0
- data/lib/Yinspire/Models/Neuron_InputOutput.rb +39 -0
- data/lib/Yinspire/Models/Neuron_Output.rb +15 -0
- data/lib/Yinspire/Models/Neuron_SRM01.rb +50 -0
- data/lib/Yinspire/Models/Neuron_SRM02.rb +64 -0
- data/lib/Yinspire/Models/Synapse_Hebb.rb +67 -0
- data/pure_cpp/Makefile +22 -0
- data/pure_cpp/README +2 -0
- data/pure_cpp/src/algo/binary_heap.h +277 -0
- data/pure_cpp/src/algo/indexed_binary_heap.h +90 -0
- data/pure_cpp/src/json/json.cc +542 -0
- data/pure_cpp/src/json/json.h +182 -0
- data/pure_cpp/src/json/json_parser.cc +685 -0
- data/pure_cpp/src/json/json_parser.h +15 -0
- data/pure_cpp/src/json/json_parser.rl +213 -0
- data/pure_cpp/src/main.cc +49 -0
- data/pure_cpp/src/memory_allocator.h +45 -0
- data/pure_cpp/src/neural_entity.cc +208 -0
- data/pure_cpp/src/neural_entity.h +243 -0
- data/pure_cpp/src/neuron.cc +136 -0
- data/pure_cpp/src/neuron.h +70 -0
- data/pure_cpp/src/neuron_srm_01.cc +77 -0
- data/pure_cpp/src/neuron_srm_01.h +36 -0
- data/pure_cpp/src/simulator.cc +151 -0
- data/pure_cpp/src/simulator.h +116 -0
- data/pure_cpp/src/synapse.cc +117 -0
- data/pure_cpp/src/synapse.h +60 -0
- data/pure_cpp/src/types.h +18 -0
- data/run.rb +68 -0
- data/tools/conv_jsonc_to_yin.rb +165 -0
- data/tools/converter.rb +93 -0
- data/tools/json_writer.rb +122 -0
- data/yinspire.gemspec +20 -0
- metadata +156 -0
data/lib/Algorithms/IndexedBinaryHeap.h
ADDED
@@ -0,0 +1,90 @@

/*
 * An Indexed Binary Heap
 *
 * Copyright (c) 2007, 2008 by Michael Neumann (mneumann@ntecs.de)
 *
 * The Indexed Binary Heap keeps track of the indices of its elements
 * stored in the heap to allow efficient updating of their priorities.
 *
 * The requirement was to modify an element's priority. In a regular
 * implicit binary heap this is an inefficient operation, as the element
 * has to be found prior to modifying its priority. And finding an
 * element is O(n) in an implicit binary heap due to its unsorted
 * nature (i.e. binary search cannot be applied). By keeping track of
 * the element's index and storing this value inside the element's
 * structure, the complexity of modifying an element's priority is
 * reduced to O(log n) in the worst case.
 *
 * NOTE: Index 0 of the elements array is unused. It's the index that
 * should be used to denote that an element is NOT actually present in
 * the binary heap.
 *
 * See the documentation of BinaryHeap as well.
 *
 * Example:
 *
 *   struct E
 *   {
 *     float schedule_at;
 *     unsigned int schedule_index;
 *
 *     inline static bool less(const E& e1, const E& e2)
 *     {
 *       return e1.schedule_at < e2.schedule_at;
 *     }
 *
 *     inline static unsigned int& index(const E& e)
 *     {
 *       return e.schedule_index;
 *     }
 *   };
 *
 *   IndexedBinaryHeap<E, MemoryAllocator> heap;
 *   ...
 *
 */

#ifndef __YINSPIRE__INDEXED_BINARY_HEAP__
#define __YINSPIRE__INDEXED_BINARY_HEAP__

#include "BinaryHeap.h"

template <typename E, class Acc=E>
struct BinaryHeapIndexer
{
  static inline void index_changed(E& e, unsigned int i)
  {
    Acc::index(e) = i;
  }
};


template <typename E, class Alloc, class Acc=E, unsigned int MIN_CAPA=1024>
class IndexedBinaryHeap : public BinaryHeap<E, Alloc, Acc, BinaryHeapIndexer<E, Acc>, MIN_CAPA>
{
    typedef unsigned int I; // index type
    typedef BinaryHeap<E, Alloc, Acc, BinaryHeapIndexer<E, Acc>, MIN_CAPA> super;
    typedef BinaryHeapIndexer<E, Acc> Idx;

  public:

    void
    update(const E& element)
    {
      I i = Acc::index(element);
      if (i == 0)
      {
        super::push(element);
      }
      else
      {
        // FIXME: use propagate up/down instead
        Idx::index_changed(this->elements[i], 0); // detach from heap
        I bubble = super::move_bubble_down(i);
        super::insert_and_bubble_up(bubble, element);
      }
    }

};

#endif
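
For illustration, here is a small standalone sketch of the index-tracking idea described in the header comment above. It does not use the gem's BinaryHeap/IndexedBinaryHeap classes; the Event struct, IndexedMinHeap class and main() below are hypothetical names introduced only to show how a stored heap index turns a priority change into an O(log n) sift instead of an O(n) linear search.

// Standalone sketch: each element remembers its current heap slot, so a
// priority change only needs a sift from that slot. Slot 0 stays unused,
// mirroring the "index 0 == not in the heap" convention above.
#include <cstdio>
#include <vector>

struct Event
{
  float priority;
  unsigned int heap_index; // 0 == not in the heap
};

class IndexedMinHeap
{
  public:
    IndexedMinHeap() : slots(1, (Event*) 0) {} // slot 0 stays unused

    void push(Event* e)
    {
      slots.push_back(e);
      e->heap_index = (unsigned int) (slots.size() - 1);
      sift_up(e->heap_index);
    }

    // Re-establish the heap property after e->priority was modified.
    void update(Event* e)
    {
      sift_up(e->heap_index);
      sift_down(e->heap_index);
    }

    Event* top() const { return slots[1]; }

  private:
    void swap_slots(unsigned int a, unsigned int b)
    {
      Event* tmp = slots[a]; slots[a] = slots[b]; slots[b] = tmp;
      slots[a]->heap_index = a;   // keep the stored indices in sync
      slots[b]->heap_index = b;
    }

    void sift_up(unsigned int i)
    {
      while (i > 1 && slots[i]->priority < slots[i / 2]->priority)
      {
        swap_slots(i, i / 2);
        i /= 2;
      }
    }

    void sift_down(unsigned int i)
    {
      for (;;)
      {
        unsigned int smallest = i, l = 2 * i, r = 2 * i + 1;
        if (l < slots.size() && slots[l]->priority < slots[smallest]->priority) smallest = l;
        if (r < slots.size() && slots[r]->priority < slots[smallest]->priority) smallest = r;
        if (smallest == i) break;
        swap_slots(i, smallest);
        i = smallest;
      }
    }

    std::vector<Event*> slots;
};

int main()
{
  Event a = { 5.0f, 0 }, b = { 3.0f, 0 }, c = { 8.0f, 0 };
  IndexedMinHeap heap;
  heap.push(&a); heap.push(&b); heap.push(&c);

  c.priority = 1.0f;  // reschedule c earlier ...
  heap.update(&c);    // ... and fix its position in O(log n)

  std::printf("top priority: %.1f\n", heap.top()->priority); // prints 1.0
  return 0;
}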
data/lib/Algorithms/PairingHeap.h
ADDED
@@ -0,0 +1,169 @@

/*
 * Implementation of a Pairing Heap
 *
 * Copyright (c) 2007, 2008 by Michael Neumann (mneumann@ntecs.de)
 *
 * Requirements for "ACC":
 *
 *   struct ACC {
 *     inline static bool less(const T*, const T*);
 *     inline static T*& next(T*);
 *     inline static T*& previous(T*);
 *     inline static T*& child(T*);
 *   }
 *
 */

#ifndef __YINSPIRE__PAIRING_HEAP__
#define __YINSPIRE__PAIRING_HEAP__

template <class T, class ACC=T>
class PairingHeap
{
  public:

    PairingHeap()
    {
      this->root = NULL;
      this->size_ = 0;
    }

    bool
    empty() const
    {
      return (this->root == NULL);
    }

    unsigned int
    size() const
    {
      return this->size_;
    }

    T*
    top() const
    {
      return this->root;
    }

    void
    pop()
    {
      T* current;
      T* last;
      T* r1;
      T* r2;

      --this->size_;

      if (ACC::child(this->root) != NULL)
      {
        // remove parent pointer from left-most child node
        ACC::previous(ACC::child(this->root)) = NULL;

        current = ACC::child(this->root);
        last = NULL;

        // left-to-right pass
        while (current != NULL)
        {
          r1 = current;
          r2 = ACC::next(current);

          if (r2 == NULL)
          {
            // no need to "meld", because we're at the end
            ACC::previous(r1) = last;
            last = current;
            current = NULL;
          }
          else
          {
            current = ACC::next(r2);

            ACC::next(r1) = NULL;
            ACC::previous(r1) = NULL;
            ACC::next(r2) = NULL;
            ACC::previous(r2) = NULL;

            r1 = meld(r1, r2);
            ACC::previous(r1) = last;
            last = r1;
          }
        }

        this->root = last;
        current = ACC::previous(last);

        // make it a clean root-node
        ACC::next(this->root) = NULL;
        ACC::previous(this->root) = NULL;

        // right-to-left pass
        while (current != NULL)
        {
          r2 = current;

          current = ACC::previous(r2);
          ACC::previous(r2) = NULL; // make it a clean root-node

          this->root = meld(this->root, r2);
        }
      }
      else
      {
        this->root = NULL;
      }
    }

    /*
     * Insert a new element into the Pairing Heap.
     */
    void
    push(T* node)
    {
      ACC::next(node) = NULL;
      ACC::previous(node) = NULL; // == NULL means it's a root node
      ACC::child(node) = NULL;

      ++this->size_;

      this->root = empty() ? node : meld(this->root, node);
    }

  private:

    /*
     * Meld two Pairing Heaps
     */
    T*
    meld(T* root1, T* root2)
    {
      if (ACC::less(root2, root1))
      {
        T* tmp = root1;
        root1 = root2;
        root2 = tmp;
      }

      // root2 becomes the leftmost node of root1
      ACC::previous(root2) = root2; // "parent" pointer for leftmost child
      ACC::next(root2) = ACC::child(root1);

      if (ACC::child(root1) != NULL)
      {
        // assign double linked-list
        ACC::previous(ACC::child(root1)) = root2;
      }
      ACC::child(root1) = root2;
      return root1;
    }

  private:

    T* root;
    unsigned int size_;

};

#endif
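
A hedged usage sketch for the PairingHeap above: the element type doubles as its own ACC by providing less/next/previous/child, as the header comment requires. The Spike struct and the include path (Algorithms/PairingHeap.h, inferred from the file list) are assumptions for illustration, not part of the gem.

// Usage sketch: intrusive pairing-heap node that serves as its own ACC.
#include <cstdio>
#include "Algorithms/PairingHeap.h"  // the header shown above (assumed path)

struct Spike
{
  float at;              // priority: delivery time
  Spike* next_sibling;
  Spike* prev_sibling;
  Spike* first_child;

  // ACC interface expected by PairingHeap
  inline static bool less(const Spike* a, const Spike* b) { return a->at < b->at; }
  inline static Spike*& next(Spike* s)     { return s->next_sibling; }
  inline static Spike*& previous(Spike* s) { return s->prev_sibling; }
  inline static Spike*& child(Spike* s)    { return s->first_child; }
};

int main()
{
  PairingHeap<Spike> heap;   // ACC defaults to Spike itself

  Spike a = { 2.5f }, b = { 0.5f }, c = { 1.0f };
  heap.push(&a);
  heap.push(&b);
  heap.push(&c);

  // Drain in priority order: prints 0.5, 1.0, 2.5
  while (!heap.empty())
  {
    std::printf("%.1f\n", heap.top()->at);
    heap.pop();
  }
  return 0;
}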
data/lib/Allocators/ChunkedFreelistAllocator.h
ADDED
@@ -0,0 +1,96 @@

/*
 * A chunked freelist Allocator
 *
 * Copyright (c) 2007, 2008 by Michael Neumann (mneumann@ntecs.de)
 *
 * Memory is allocated in chunks of size +chunk_size+. Chunks are never
 * freed except when the destructor is called.
 */

#ifndef __YINSPIRE__CHUNKED_FREELIST_ALLOCATOR__
#define __YINSPIRE__CHUNKED_FREELIST_ALLOCATOR__

#include <assert.h>

template <class T, class ACC = T>
class ChunkedFreelistAllocator
{

    template <class TT>
    struct Chunk
    {
      Chunk<TT> *next_chunk;
      TT *array;
    };

  public:

    ChunkedFreelistAllocator(unsigned int chunksize)
    {
      this->freelist = NULL;
      this->chunklist = NULL;
      this->chunksize = chunksize;
    }

    T*
    allocate()
    {

      // alloc new chunk if no more free elements are available
      if (this->freelist == NULL) alloc_chunk();

      assert(this->freelist != NULL);

      T* e = this->freelist;
      this->freelist = ACC::next(e);
      ACC::next(e) = NULL;

      return e;
    }

    void
    free(T* e)
    {
      //assert(ACC::next(e) == NULL);
      ACC::next(e) = this->freelist;
      this->freelist = e;
    }

    void
    free_list(T* first, T* last)
    {
      assert(last != NULL);
      //assert(ACC::next(first) == NULL);
      ACC::next(last) = this->freelist;
      this->freelist = first;
    }

  protected:

    void
    alloc_chunk()
    {
      Chunk<T> *new_chunk = new Chunk<T>;
      new_chunk->next_chunk = this->chunklist;
      this->chunklist = new_chunk;

      new_chunk->array = new T[this->chunksize];

      // put all elements of new chunk on freelist
      for (unsigned int i=0; i<this->chunksize-1; i++)
      {
        ACC::next(&new_chunk->array[i]) = &new_chunk->array[i+1];
      }

      ACC::next(&new_chunk->array[this->chunksize-1]) = this->freelist;
      this->freelist = &new_chunk->array[0];
    }

  private:

    T *freelist;
    Chunk<T> *chunklist;
    unsigned int chunksize;
};

#endif
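
A hedged usage sketch for the ChunkedFreelistAllocator above: the element type exposes the intrusive freelist link through ACC::next (ACC defaults to T). The Stimulus struct and the include path (Allocators/ChunkedFreelistAllocator.h, per the file list) are assumptions for illustration.

// Usage sketch: pool-allocate small objects that carry their own freelist link.
#include <cstdio>
#include "Allocators/ChunkedFreelistAllocator.h"  // the header shown above (assumed path)

struct Stimulus
{
  float at;
  float weight;
  Stimulus* next_free;  // used only by the allocator's freelist

  inline static Stimulus*& next(Stimulus* s) { return s->next_free; }
};

int main()
{
  // Grab memory 512 elements at a time; freed elements go back on the freelist.
  ChunkedFreelistAllocator<Stimulus> pool(512);

  Stimulus* s = pool.allocate();
  s->at = 1.5f;
  s->weight = 0.25f;
  std::printf("stimulus at %.1f, weight %.2f\n", s->at, s->weight);

  pool.free(s);  // returned to the freelist; the chunk itself is kept
  return 0;
}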
data/lib/Allocators/MemoryAllocator.h
ADDED
@@ -0,0 +1,45 @@

#ifndef __YINSPIRE__MEMORY_ALLOCATOR__
#define __YINSPIRE__MEMORY_ALLOCATOR__

#include <stdlib.h>

/*
 * Provides some basic utility functions for allocating and releasing
 * memory.
 */
template <typename T>
class MemoryAllocator
{
  public:

    inline static T*
    alloc_n(size_t n)
    {
      T* ptr = (T*) calloc(n, sizeof(T));
      if (ptr == NULL)
      {
        throw "memory allocation failed";
      }
      return ptr;
    }

    inline static T*
    realloc_n(T* old_ptr, size_t n)
    {
      T* ptr = (T*) realloc(old_ptr, sizeof(T)*n);
      if (ptr == NULL)
      {
        throw "memory allocation failed";
      }
      return ptr;
    }

    inline static void
    free(T* ptr)
    {
      ::free(ptr);
    }

};

#endif
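
A minimal, hedged sketch of how MemoryAllocator<T> might be used on its own (e.g. as the Alloc parameter of the heap classes above); the include path (Allocators/MemoryAllocator.h, per the file list) is assumed.

#include <cstdio>
#include "Allocators/MemoryAllocator.h"  // the header shown above (assumed path)

int main()
{
  // zero-initialized block of 4 floats (alloc_n uses calloc)
  float* values = MemoryAllocator<float>::alloc_n(4);
  values[0] = 1.0f;

  // grow the block; existing contents are preserved by realloc
  values = MemoryAllocator<float>::realloc_n(values, 8);
  values[7] = 2.0f;

  std::printf("%.1f %.1f\n", values[0], values[7]);
  MemoryAllocator<float>::free(values);
  return 0;
}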
data/lib/Allocators/RubyMemoryAllocator.h
ADDED
@@ -0,0 +1,37 @@

#ifndef __YINSPIRE__RUBY_MEMORY_ALLOCATOR__
#define __YINSPIRE__RUBY_MEMORY_ALLOCATOR__

#include "ruby.h"
#include <stdlib.h>

/*
 * Provides some basic utility functions for allocating and releasing
 * memory.
 */
template <typename T>
class MemoryAllocator
{
  public:

    inline static T*
    alloc_n(size_t n)
    {
      return ALLOC_N(T, n);
    }

    inline static T*
    realloc_n(T* old_ptr, size_t n)
    {
      REALLOC_N(old_ptr, T, n);
      return old_ptr;
    }

    inline static void
    free(T* ptr)
    {
      ::free(ptr);
    }

};

#endif
data/lib/Yinspire.rb
ADDED
@@ -0,0 +1,69 @@

require 'cplus2ruby'

Cplus2Ruby.add_type_alias 'real'    => 'float'
Cplus2Ruby.add_type_alias 'simtime' => 'float'
Cplus2Ruby.add_type_alias 'uint'    => 'unsigned int'
Cplus2Ruby.settings :default_body_when_nil => 'THROW("abstract method");'

Infinity = 1.0/0.0

Cplus2Ruby << %{
  #include <assert.h>
  #include <math.h>
  #include "Algorithms/Array.h"
  #include "Algorithms/BinaryHeap.h"
  #include "Algorithms/IndexedBinaryHeap.h"
  #include "Allocators/RubyMemoryAllocator.h"

  #define real_exp expf
  #define real_fabs fabsf

  #define THROW(str) rb_raise(rb_eRuntimeError, str)

  #define MIN(a,b) ((a) < (b) ? (a) : (b))
  #define MAX(a,b) ((a) > (b) ? (a) : (b))

  #define Infinity INFINITY
}

def assert(cond)
  raise "assertion failed" unless cond
end

class Simulator; cplus2ruby end
class NeuralEntity; cplus2ruby end

# Forward declarations
class Neuron < NeuralEntity; end
class Synapse < NeuralEntity; end

require 'Yinspire/Core/Simulator'
require 'Yinspire/Core/NeuralEntity'
require 'Yinspire/Core/Neuron'
require 'Yinspire/Core/Synapse'

module Yinspire
  ROOT = File.expand_path(File.join(File.dirname(__FILE__), ".."))
  LIB_DIR = File.expand_path(File.dirname(__FILE__))

  def self.commit(file, force_compilation=false)
    cflags = "-DNDEBUG -O3 -fomit-frame-pointer -Winline -Wall -I#{LIB_DIR} -I${PWD}"
    ldflags = ""
    Cplus2Ruby.commit(file, force_compilation, cflags, ldflags)

    Cplus2Ruby.model.entities.each do |klass|
      next unless klass.ancestors.include?(NeuralEntity)
      NeuralEntity.entity_type_map[klass.name] = klass
      NeuralEntity.entity_type_map_reverse[klass] = klass.name
      lc = NeuralEntity.entity_ann_load_cache[klass] = Hash.new
      dc = NeuralEntity.entity_ann_dump_cache[klass] = Array.new

      klass.recursive_annotations.each {|name, h|
        next unless h[:marshal]
        lc[name.to_sym] = lc[name.to_s] = :"#{name}="
        dc << name.to_sym
      }
    end
  end

end # module Yinspire