libmf 0.1.0
- checksums.yaml +7 -0
- data/CHANGELOG.md +3 -0
- data/LICENSE.txt +22 -0
- data/README.md +125 -0
- data/ext/libmf/extconf.rb +18 -0
- data/lib/libmf.bundle +0 -0
- data/lib/libmf.rb +26 -0
- data/lib/libmf/ffi.rb +62 -0
- data/lib/libmf/model.rb +112 -0
- data/lib/libmf/version.rb +3 -0
- data/vendor/libmf/COPYRIGHT +31 -0
- data/vendor/libmf/Makefile +34 -0
- data/vendor/libmf/Makefile.win +36 -0
- data/vendor/libmf/README +637 -0
- data/vendor/libmf/demo/all_one_matrix.te.txt +1382 -0
- data/vendor/libmf/demo/all_one_matrix.tr.txt +5172 -0
- data/vendor/libmf/demo/binary_matrix.te.txt +1312 -0
- data/vendor/libmf/demo/binary_matrix.tr.txt +4937 -0
- data/vendor/libmf/demo/demo.bat +40 -0
- data/vendor/libmf/demo/demo.sh +58 -0
- data/vendor/libmf/demo/real_matrix.te.txt +794 -0
- data/vendor/libmf/demo/real_matrix.tr.txt +5000 -0
- data/vendor/libmf/mf-predict.cpp +207 -0
- data/vendor/libmf/mf-train.cpp +378 -0
- data/vendor/libmf/mf.cpp +4683 -0
- data/vendor/libmf/mf.def +21 -0
- data/vendor/libmf/mf.h +130 -0
- data/vendor/libmf/windows/mf-predict.exe +0 -0
- data/vendor/libmf/windows/mf-train.exe +0 -0
- data/vendor/libmf/windows/mf.dll +0 -0
- metadata +142 -0
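
The gem wraps the vendored LIBMF C++ library behind a small Ruby layer (lib/libmf.rb, lib/libmf/ffi.rb, lib/libmf/model.rb). A minimal usage sketch, assuming the Model class exposes fit and predict as the file layout suggests; the exact class, method, and option names are documented in data/README.md, which is not shown in this diff:

require "libmf"

# A sparse matrix given as [row, column, value] triples --
# the same format used by the vendored demo files.
data = [
  [0, 0, 5.0],
  [0, 2, 3.5],
  [1, 1, 4.0]
]

model = Libmf::Model.new   # assumed class name, per lib/libmf/model.rb
model.fit(data)            # factorize the sparse matrix
model.predict(0, 2)        # estimated value at row 0, column 2
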
data/vendor/libmf/demo/demo.bat
@@ -0,0 +1,40 @@
+SET train=..\windows\mf-train
+SET predict=..\windows\mf-predict
+
+::#########################################################################
+:: Real-valued matrix factorization (RVMF)
+::#########################################################################
+echo "Real-valued matrix factorization"
+:: In-memory training with holdout valudation
+%train% -f 0 -l2 0.05 -k 100 -t 10 -p real_matrix.te.txt real_matrix.tr.txt rvmf_model.txt
+:: Do prediction and show MAE
+%predict% -e 1 real_matrix.te.txt rvmf_model.txt rvmf_output.txt
+
+::#########################################################################
+:: Binary matrix factorization (BMF)
+::#########################################################################
+echo "binary matrix factorization"
+:: In-memory training with holdout valudation
+%train% -f 5 -l2 0.01 -k 64 -p binary_matrix.te.txt binary_matrix.tr.txt bmf_model.txt
+:: Do prediction and show accuracy
+%predict% -e 6 binary_matrix.te.txt bmf_model.txt bmf_output.txt
+
+::#########################################################################
+:: One-class matrix factorization (OCMF)
+::#########################################################################
+echo "one-class matrix factorization using a stochastic gradient method"
+:: In-memory training with holdout validation
+%train% -f 10 -l2 0.01 -k 32 -p all_one_matrix.te.txt all_one_matrix.tr.txt ocmf_model.txt
+:: Do prediction and show row-oriented MPR
+%predict% -e 10 all_one_matrix.te.txt ocmf_model.txt ocmf_output.txt
+:: Do prediction and show row-oriented AUC
+%predict% -e 12 all_one_matrix.te.txt ocmf_model.txt ocmf_output.txt
+
+echo "one-class matrix factorization using a coordinate descent method"
+:: In-memory training with holdout validation
+%train% -f 12 -l2 0.01 -k 32 -a 0.001 -c 0.0001 -p all_one_matrix.te.txt all_one_matrix.tr.txt ocmf_model.txt
+:: Do prediction and show row-oriented MPR
+%predict% -e 10 all_one_matrix.te.txt ocmf_model.txt ocmf_output.txt
+:: Do prediction and show row-oriented AUC
+%predict% -e 12 all_one_matrix.te.txt ocmf_model.txt ocmf_output.txt
+)
data/vendor/libmf/demo/demo.sh
@@ -0,0 +1,58 @@
+#!/bin/sh
+train=../mf-train
+predict=../mf-predict
+
+##########################################################################
+# Build package if no binary found and this script is exectuted via the
+# following command.
+# libmf/demo > sh demo.sh
+##########################################################################
+if [ ! -s $train ] || [ ! -s $predict ]
+then
+    (cd .. && make)
+fi
+
+##########################################################################
+# Real-valued matrix factorization (RVMF)
+##########################################################################
+echo "--------------------------------"
+echo "Real-valued matrix factorization"
+echo "--------------------------------"
+# In-memory training with holdout valudation
+$train -f 0 -l2 0.05 -k 100 -t 10 -p real_matrix.te.txt real_matrix.tr.txt rvmf_model.txt
+# Do prediction and show MAE
+$predict -e 1 real_matrix.te.txt rvmf_model.txt rvmf_output.txt
+
+##########################################################################
+# Binary matrix factorization (BMF)
+##########################################################################
+echo "---------------------------"
+echo "binary matrix factorization"
+echo "---------------------------"
+# In-memory training with holdout valudation
+$train -f 5 -l2 0.01 -k 64 -p binary_matrix.te.txt binary_matrix.tr.txt bmf_model.txt
+# Do prediction and show accuracy
+$predict -e 6 binary_matrix.te.txt bmf_model.txt bmf_output.txt
+
+##########################################################################
+# One-class matrix factorization (OCMF)
+##########################################################################
+echo "-----------------------------------------------------------------"
+echo "one-class matrix factorization using a stochastic gradient method"
+echo "-----------------------------------------------------------------"
+# In-memory training with holdout validation
+$train -f 10 -l2 0.01 -k 32 -p all_one_matrix.te.txt all_one_matrix.tr.txt ocmf_model.txt
+# Do prediction and show row-oriented MPR
+$predict -e 10 all_one_matrix.te.txt ocmf_model.txt ocmf_output.txt
+# Do prediction and show row-oriented AUC
+$predict -e 12 all_one_matrix.te.txt ocmf_model.txt ocmf_output.txt
+
+echo "----------------------------------------------------------------"
+echo "one-class matrix factorization using a coordinate descent method"
+echo "----------------------------------------------------------------"
+# In-memory training with holdout validation
+$train -f 12 -l2 0.01 -k 32 -a 0.001 -c 0.0001 -p all_one_matrix.te.txt all_one_matrix.tr.txt ocmf_model.txt
+# Do prediction and show row-oriented MPR
+$predict -e 10 all_one_matrix.te.txt ocmf_model.txt ocmf_output.txt
+# Do prediction and show row-oriented AUC
+$predict -e 12 all_one_matrix.te.txt ocmf_model.txt ocmf_output.txt
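
Both demo scripts drive the same workflow through the bundled command-line tools: -f selects the factorization mode, -k the number of latent factors, -l2 the regularization strength, -t the iteration count, and -p a holdout validation set; mf-predict's -e picks the evaluation metric (MAE, accuracy, MPR, or AUC in the runs above). A rough Ruby counterpart of the real-valued run is sketched below; the option and keyword names (factors, iterations, eval_set) are assumptions, not confirmed gem API:

require "libmf"

# Roughly mirrors:
#   mf-train -f 0 -l2 0.05 -k 100 -t 10 -p real_matrix.te.txt real_matrix.tr.txt
# Option and keyword names below are assumptions, not confirmed gem API.
train_set = [[0, 0, 5.0], [0, 2, 3.5], [1, 1, 4.0]]  # placeholder triples
eval_set  = [[1, 0, 2.0]]

model = Libmf::Model.new(factors: 100, iterations: 10)
model.fit(train_set, eval_set: eval_set)  # holdout validation, if supported
model.predict(1, 0)
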
data/vendor/libmf/demo/real_matrix.te.txt
@@ -0,0 +1,794 @@
+1040 128 3.5
+967 1 2.1
+392 17 4.1
+406 6 3.1
+1333 20 5.1
+2168 45 4.0
+923 26 4.5
+2004 13 4.0
+92 155 4.0
+2019 45 3.0
(the remaining 784 lines follow the same whitespace-separated "row column value" format)