eluka 0.1.0

Sign up to get free protection for your applications and to get access to all the features.
Files changed (73) hide show
  1. data/.document +5 -0
  2. data/DOCUMENTATION_STANDARDS +39 -0
  3. data/Gemfile +13 -0
  4. data/Gemfile.lock +20 -0
  5. data/LICENSE.txt +20 -0
  6. data/README.rdoc +19 -0
  7. data/Rakefile +69 -0
  8. data/VERSION +1 -0
  9. data/examples/example.rb +59 -0
  10. data/ext/libsvm/COPYRIGHT +31 -0
  11. data/ext/libsvm/FAQ.html +1749 -0
  12. data/ext/libsvm/Makefile +25 -0
  13. data/ext/libsvm/Makefile.win +33 -0
  14. data/ext/libsvm/README +733 -0
  15. data/ext/libsvm/extconf.rb +1 -0
  16. data/ext/libsvm/heart_scale +270 -0
  17. data/ext/libsvm/java/Makefile +25 -0
  18. data/ext/libsvm/java/libsvm.jar +0 -0
  19. data/ext/libsvm/java/libsvm/svm.java +2776 -0
  20. data/ext/libsvm/java/libsvm/svm.m4 +2776 -0
  21. data/ext/libsvm/java/libsvm/svm_model.java +21 -0
  22. data/ext/libsvm/java/libsvm/svm_node.java +6 -0
  23. data/ext/libsvm/java/libsvm/svm_parameter.java +47 -0
  24. data/ext/libsvm/java/libsvm/svm_print_interface.java +5 -0
  25. data/ext/libsvm/java/libsvm/svm_problem.java +7 -0
  26. data/ext/libsvm/java/svm_predict.java +163 -0
  27. data/ext/libsvm/java/svm_scale.java +350 -0
  28. data/ext/libsvm/java/svm_toy.java +471 -0
  29. data/ext/libsvm/java/svm_train.java +318 -0
  30. data/ext/libsvm/java/test_applet.html +1 -0
  31. data/ext/libsvm/python/Makefile +4 -0
  32. data/ext/libsvm/python/README +331 -0
  33. data/ext/libsvm/python/svm.py +259 -0
  34. data/ext/libsvm/python/svmutil.py +242 -0
  35. data/ext/libsvm/svm-predict.c +226 -0
  36. data/ext/libsvm/svm-scale.c +353 -0
  37. data/ext/libsvm/svm-toy/gtk/Makefile +22 -0
  38. data/ext/libsvm/svm-toy/gtk/callbacks.cpp +423 -0
  39. data/ext/libsvm/svm-toy/gtk/callbacks.h +54 -0
  40. data/ext/libsvm/svm-toy/gtk/interface.c +164 -0
  41. data/ext/libsvm/svm-toy/gtk/interface.h +14 -0
  42. data/ext/libsvm/svm-toy/gtk/main.c +23 -0
  43. data/ext/libsvm/svm-toy/gtk/svm-toy.glade +238 -0
  44. data/ext/libsvm/svm-toy/qt/Makefile +17 -0
  45. data/ext/libsvm/svm-toy/qt/svm-toy.cpp +413 -0
  46. data/ext/libsvm/svm-toy/windows/svm-toy.cpp +456 -0
  47. data/ext/libsvm/svm-train.c +376 -0
  48. data/ext/libsvm/svm.cpp +3060 -0
  49. data/ext/libsvm/svm.def +19 -0
  50. data/ext/libsvm/svm.h +105 -0
  51. data/ext/libsvm/svm.o +0 -0
  52. data/ext/libsvm/tools/README +149 -0
  53. data/ext/libsvm/tools/checkdata.py +108 -0
  54. data/ext/libsvm/tools/easy.py +79 -0
  55. data/ext/libsvm/tools/grid.py +359 -0
  56. data/ext/libsvm/tools/subset.py +146 -0
  57. data/ext/libsvm/windows/libsvm.dll +0 -0
  58. data/ext/libsvm/windows/svm-predict.exe +0 -0
  59. data/ext/libsvm/windows/svm-scale.exe +0 -0
  60. data/ext/libsvm/windows/svm-toy.exe +0 -0
  61. data/ext/libsvm/windows/svm-train.exe +0 -0
  62. data/lib/eluka.rb +10 -0
  63. data/lib/eluka/bijection.rb +23 -0
  64. data/lib/eluka/data_point.rb +36 -0
  65. data/lib/eluka/document.rb +47 -0
  66. data/lib/eluka/feature_vector.rb +86 -0
  67. data/lib/eluka/features.rb +31 -0
  68. data/lib/eluka/model.rb +129 -0
  69. data/lib/fselect.rb +321 -0
  70. data/lib/grid.rb +25 -0
  71. data/test/helper.rb +18 -0
  72. data/test/test_eluka.rb +7 -0
  73. metadata +214 -0
@@ -0,0 +1 @@
1
+ #!/usr/bin/env ruby
@@ -0,0 +1,270 @@
1
+ +1 1:0.708333 2:1 3:1 4:-0.320755 5:-0.105023 6:-1 7:1 8:-0.419847 9:-1 10:-0.225806 12:1 13:-1
2
+ -1 1:0.583333 2:-1 3:0.333333 4:-0.603774 5:1 6:-1 7:1 8:0.358779 9:-1 10:-0.483871 12:-1 13:1
3
+ +1 1:0.166667 2:1 3:-0.333333 4:-0.433962 5:-0.383562 6:-1 7:-1 8:0.0687023 9:-1 10:-0.903226 11:-1 12:-1 13:1
4
+ -1 1:0.458333 2:1 3:1 4:-0.358491 5:-0.374429 6:-1 7:-1 8:-0.480916 9:1 10:-0.935484 12:-0.333333 13:1
5
+ -1 1:0.875 2:-1 3:-0.333333 4:-0.509434 5:-0.347032 6:-1 7:1 8:-0.236641 9:1 10:-0.935484 11:-1 12:-0.333333 13:-1
6
+ -1 1:0.5 2:1 3:1 4:-0.509434 5:-0.767123 6:-1 7:-1 8:0.0534351 9:-1 10:-0.870968 11:-1 12:-1 13:1
7
+ +1 1:0.125 2:1 3:0.333333 4:-0.320755 5:-0.406393 6:1 7:1 8:0.0839695 9:1 10:-0.806452 12:-0.333333 13:0.5
8
+ +1 1:0.25 2:1 3:1 4:-0.698113 5:-0.484018 6:-1 7:1 8:0.0839695 9:1 10:-0.612903 12:-0.333333 13:1
9
+ +1 1:0.291667 2:1 3:1 4:-0.132075 5:-0.237443 6:-1 7:1 8:0.51145 9:-1 10:-0.612903 12:0.333333 13:1
10
+ +1 1:0.416667 2:-1 3:1 4:0.0566038 5:0.283105 6:-1 7:1 8:0.267176 9:-1 10:0.290323 12:1 13:1
11
+ -1 1:0.25 2:1 3:1 4:-0.226415 5:-0.506849 6:-1 7:-1 8:0.374046 9:-1 10:-0.83871 12:-1 13:1
12
+ -1 2:1 3:1 4:-0.0943396 5:-0.543379 6:-1 7:1 8:-0.389313 9:1 10:-1 11:-1 12:-1 13:1
13
+ -1 1:-0.375 2:1 3:0.333333 4:-0.132075 5:-0.502283 6:-1 7:1 8:0.664122 9:-1 10:-1 11:-1 12:-1 13:-1
14
+ +1 1:0.333333 2:1 3:-1 4:-0.245283 5:-0.506849 6:-1 7:-1 8:0.129771 9:-1 10:-0.16129 12:0.333333 13:-1
15
+ -1 1:0.166667 2:-1 3:1 4:-0.358491 5:-0.191781 6:-1 7:1 8:0.343511 9:-1 10:-1 11:-1 12:-0.333333 13:-1
16
+ -1 1:0.75 2:-1 3:1 4:-0.660377 5:-0.894977 6:-1 7:-1 8:-0.175573 9:-1 10:-0.483871 12:-1 13:-1
17
+ +1 1:-0.291667 2:1 3:1 4:-0.132075 5:-0.155251 6:-1 7:-1 8:-0.251908 9:1 10:-0.419355 12:0.333333 13:1
18
+ +1 2:1 3:1 4:-0.132075 5:-0.648402 6:1 7:1 8:0.282443 9:1 11:1 12:-1 13:1
19
+ -1 1:0.458333 2:1 3:-1 4:-0.698113 5:-0.611872 6:-1 7:1 8:0.114504 9:1 10:-0.419355 12:-1 13:-1
20
+ -1 1:-0.541667 2:1 3:-1 4:-0.132075 5:-0.666667 6:-1 7:-1 8:0.633588 9:1 10:-0.548387 11:-1 12:-1 13:1
21
+ +1 1:0.583333 2:1 3:1 4:-0.509434 5:-0.52968 6:-1 7:1 8:-0.114504 9:1 10:-0.16129 12:0.333333 13:1
22
+ -1 1:-0.208333 2:1 3:-0.333333 4:-0.320755 5:-0.456621 6:-1 7:1 8:0.664122 9:-1 10:-0.935484 12:-1 13:-1
23
+ -1 1:-0.416667 2:1 3:1 4:-0.603774 5:-0.191781 6:-1 7:-1 8:0.679389 9:-1 10:-0.612903 12:-1 13:-1
24
+ -1 1:-0.25 2:1 3:1 4:-0.660377 5:-0.643836 6:-1 7:-1 8:0.0992366 9:-1 10:-0.967742 11:-1 12:-1 13:-1
25
+ -1 1:0.0416667 2:-1 3:-0.333333 4:-0.283019 5:-0.260274 6:1 7:1 8:0.343511 9:1 10:-1 11:-1 12:-0.333333 13:-1
26
+ -1 1:-0.208333 2:-1 3:0.333333 4:-0.320755 5:-0.319635 6:-1 7:-1 8:0.0381679 9:-1 10:-0.935484 11:-1 12:-1 13:-1
27
+ -1 1:-0.291667 2:-1 3:1 4:-0.169811 5:-0.465753 6:-1 7:1 8:0.236641 9:1 10:-1 12:-1 13:-1
28
+ -1 1:-0.0833333 2:-1 3:0.333333 4:-0.509434 5:-0.228311 6:-1 7:1 8:0.312977 9:-1 10:-0.806452 11:-1 12:-1 13:-1
29
+ +1 1:0.208333 2:1 3:0.333333 4:-0.660377 5:-0.525114 6:-1 7:1 8:0.435115 9:-1 10:-0.193548 12:-0.333333 13:1
30
+ -1 1:0.75 2:-1 3:0.333333 4:-0.698113 5:-0.365297 6:1 7:1 8:-0.0992366 9:-1 10:-1 11:-1 12:-0.333333 13:-1
31
+ +1 1:0.166667 2:1 3:0.333333 4:-0.358491 5:-0.52968 6:-1 7:1 8:0.206107 9:-1 10:-0.870968 12:-0.333333 13:1
32
+ -1 1:0.541667 2:1 3:1 4:0.245283 5:-0.534247 6:-1 7:1 8:0.0229008 9:-1 10:-0.258065 11:-1 12:-1 13:0.5
33
+ -1 1:-0.666667 2:-1 3:0.333333 4:-0.509434 5:-0.593607 6:-1 7:-1 8:0.51145 9:-1 10:-1 11:-1 12:-1 13:-1
34
+ +1 1:0.25 2:1 3:1 4:0.433962 5:-0.086758 6:-1 7:1 8:0.0534351 9:1 10:0.0967742 11:1 12:-1 13:1
35
+ +1 1:-0.125 2:1 3:1 4:-0.0566038 5:-0.6621 6:-1 7:1 8:-0.160305 9:1 10:-0.709677 12:-1 13:1
36
+ +1 1:-0.208333 2:1 3:1 4:-0.320755 5:-0.406393 6:1 7:1 8:0.206107 9:1 10:-1 11:-1 12:0.333333 13:1
37
+ +1 1:0.333333 2:1 3:1 4:-0.132075 5:-0.630137 6:-1 7:1 8:0.0229008 9:1 10:-0.387097 11:-1 12:-0.333333 13:1
38
+ +1 1:0.25 2:1 3:-1 4:0.245283 5:-0.328767 6:-1 7:1 8:-0.175573 9:-1 10:-1 11:-1 12:-1 13:-1
39
+ -1 1:-0.458333 2:1 3:0.333333 4:-0.320755 5:-0.753425 6:-1 7:-1 8:0.206107 9:-1 10:-1 11:-1 12:-1 13:-1
40
+ -1 1:-0.208333 2:1 3:1 4:-0.471698 5:-0.561644 6:-1 7:1 8:0.755725 9:-1 10:-1 11:-1 12:-1 13:-1
41
+ +1 1:-0.541667 2:1 3:1 4:0.0943396 5:-0.557078 6:-1 7:-1 8:0.679389 9:-1 10:-1 11:-1 12:-1 13:1
42
+ -1 1:0.375 2:-1 3:1 4:-0.433962 5:-0.621005 6:-1 7:-1 8:0.40458 9:-1 10:-1 11:-1 12:-1 13:-1
43
+ -1 1:-0.375 2:1 3:0.333333 4:-0.320755 5:-0.511416 6:-1 7:-1 8:0.648855 9:1 10:-0.870968 11:-1 12:-1 13:-1
44
+ -1 1:-0.291667 2:1 3:-0.333333 4:-0.867925 5:-0.675799 6:1 7:-1 8:0.29771 9:-1 10:-1 11:-1 12:-1 13:1
45
+ +1 1:0.25 2:1 3:0.333333 4:-0.396226 5:-0.579909 6:1 7:-1 8:-0.0381679 9:-1 10:-0.290323 12:-0.333333 13:0.5
46
+ -1 1:0.208333 2:1 3:0.333333 4:-0.132075 5:-0.611872 6:1 7:1 8:0.435115 9:-1 10:-1 11:-1 12:-1 13:-1
47
+ +1 1:-0.166667 2:1 3:0.333333 4:-0.54717 5:-0.894977 6:-1 7:1 8:-0.160305 9:-1 10:-0.741935 11:-1 12:1 13:-1
48
+ +1 1:-0.375 2:1 3:1 4:-0.698113 5:-0.675799 6:-1 7:1 8:0.618321 9:-1 10:-1 11:-1 12:-0.333333 13:-1
49
+ +1 1:0.541667 2:1 3:-0.333333 4:0.245283 5:-0.452055 6:-1 7:-1 8:-0.251908 9:1 10:-1 12:1 13:0.5
50
+ +1 1:0.5 2:-1 3:1 4:0.0566038 5:-0.547945 6:-1 7:1 8:-0.343511 9:-1 10:-0.677419 12:1 13:1
51
+ +1 1:-0.458333 2:1 3:1 4:-0.207547 5:-0.136986 6:-1 7:-1 8:-0.175573 9:1 10:-0.419355 12:-1 13:0.5
52
+ -1 1:-0.0416667 2:1 3:-0.333333 4:-0.358491 5:-0.639269 6:1 7:-1 8:0.725191 9:-1 10:-1 11:-1 12:-1 13:-1
53
+ -1 1:0.5 2:-1 3:0.333333 4:-0.132075 5:0.328767 6:1 7:1 8:0.312977 9:-1 10:-0.741935 11:-1 12:-0.333333 13:-1
54
+ -1 1:0.416667 2:-1 3:-0.333333 4:-0.132075 5:-0.684932 6:-1 7:-1 8:0.648855 9:-1 10:-1 11:-1 12:0.333333 13:-1
55
+ -1 1:-0.333333 2:-1 3:-0.333333 4:-0.320755 5:-0.506849 6:-1 7:1 8:0.587786 9:-1 10:-0.806452 12:-1 13:-1
56
+ -1 1:-0.5 2:-1 3:-0.333333 4:-0.792453 5:-0.671233 6:-1 7:-1 8:0.480916 9:-1 10:-1 11:-1 12:-0.333333 13:-1
57
+ +1 1:0.333333 2:1 3:1 4:-0.169811 5:-0.817352 6:-1 7:1 8:-0.175573 9:1 10:0.16129 12:-0.333333 13:-1
58
+ -1 1:0.291667 2:-1 3:0.333333 4:-0.509434 5:-0.762557 6:1 7:-1 8:-0.618321 9:-1 10:-1 11:-1 12:-1 13:-1
59
+ +1 1:0.25 2:-1 3:1 4:0.509434 5:-0.438356 6:-1 7:-1 8:0.0992366 9:1 10:-1 12:-1 13:-1
60
+ +1 1:0.375 2:1 3:-0.333333 4:-0.509434 5:-0.292237 6:-1 7:1 8:-0.51145 9:-1 10:-0.548387 12:-0.333333 13:1
61
+ -1 1:0.166667 2:1 3:0.333333 4:0.0566038 5:-1 6:1 7:-1 8:0.557252 9:-1 10:-0.935484 11:-1 12:-0.333333 13:1
62
+ +1 1:-0.0833333 2:-1 3:1 4:-0.320755 5:-0.182648 6:-1 7:-1 8:0.0839695 9:1 10:-0.612903 12:-1 13:1
63
+ -1 1:-0.375 2:1 3:0.333333 4:-0.509434 5:-0.543379 6:-1 7:-1 8:0.496183 9:-1 10:-1 11:-1 12:-1 13:-1
64
+ -1 1:0.291667 2:-1 3:-1 4:0.0566038 5:-0.479452 6:-1 7:-1 8:0.526718 9:-1 10:-0.709677 11:-1 12:-1 13:-1
65
+ -1 1:0.416667 2:1 3:-1 4:-0.0377358 5:-0.511416 6:1 7:1 8:0.206107 9:-1 10:-0.258065 11:1 12:-1 13:0.5
66
+ +1 1:0.166667 2:1 3:1 4:0.0566038 5:-0.315068 6:-1 7:1 8:-0.374046 9:1 10:-0.806452 12:-0.333333 13:0.5
67
+ -1 1:-0.0833333 2:1 3:1 4:-0.132075 5:-0.383562 6:-1 7:1 8:0.755725 9:1 10:-1 11:-1 12:-1 13:-1
68
+ +1 1:0.208333 2:-1 3:-0.333333 4:-0.207547 5:-0.118721 6:1 7:1 8:0.236641 9:-1 10:-1 11:-1 12:0.333333 13:-1
69
+ -1 1:-0.375 2:-1 3:0.333333 4:-0.54717 5:-0.47032 6:-1 7:-1 8:0.19084 9:-1 10:-0.903226 12:-0.333333 13:-1
70
+ +1 1:-0.25 2:1 3:0.333333 4:-0.735849 5:-0.465753 6:-1 7:-1 8:0.236641 9:-1 10:-1 11:-1 12:-1 13:-1
71
+ +1 1:0.333333 2:1 3:1 4:-0.509434 5:-0.388128 6:-1 7:-1 8:0.0534351 9:1 10:0.16129 12:-0.333333 13:1
72
+ -1 1:0.166667 2:-1 3:1 4:-0.509434 5:0.0410959 6:-1 7:-1 8:0.40458 9:1 10:-0.806452 11:-1 12:-1 13:-1
73
+ -1 1:0.708333 2:1 3:-0.333333 4:0.169811 5:-0.456621 6:-1 7:1 8:0.0992366 9:-1 10:-1 11:-1 12:-1 13:-1
74
+ -1 1:0.958333 2:-1 3:0.333333 4:-0.132075 5:-0.675799 6:-1 8:-0.312977 9:-1 10:-0.645161 12:-1 13:-1
75
+ -1 1:0.583333 2:-1 3:1 4:-0.773585 5:-0.557078 6:-1 7:-1 8:0.0839695 9:-1 10:-0.903226 11:-1 12:0.333333 13:-1
76
+ +1 1:-0.333333 2:1 3:1 4:-0.0943396 5:-0.164384 6:-1 7:1 8:0.160305 9:1 10:-1 12:1 13:1
77
+ -1 1:-0.333333 2:1 3:1 4:-0.811321 5:-0.625571 6:-1 7:1 8:0.175573 9:1 10:-0.0322581 12:-1 13:-1
78
+ -1 1:-0.583333 2:-1 3:0.333333 4:-1 5:-0.666667 6:-1 7:-1 8:0.648855 9:-1 10:-1 11:-1 12:-1 13:-1
79
+ -1 1:-0.458333 2:-1 3:0.333333 4:-0.509434 5:-0.621005 6:-1 7:-1 8:0.557252 9:-1 10:-1 12:-1 13:-1
80
+ -1 1:0.125 2:1 3:-0.333333 4:-0.509434 5:-0.497717 6:-1 7:-1 8:0.633588 9:-1 10:-0.741935 11:-1 12:-1 13:-1
81
+ +1 1:0.208333 2:1 3:1 4:-0.0188679 5:-0.579909 6:-1 7:-1 8:-0.480916 9:-1 10:-0.354839 12:-0.333333 13:1
82
+ +1 1:-0.75 2:1 3:1 4:-0.509434 5:-0.671233 6:-1 7:-1 8:-0.0992366 9:1 10:-0.483871 12:-1 13:1
83
+ +1 1:0.208333 2:1 3:1 4:0.0566038 5:-0.342466 6:-1 7:1 8:-0.389313 9:1 10:-0.741935 11:-1 12:-1 13:1
84
+ -1 1:-0.5 2:1 3:0.333333 4:-0.320755 5:-0.598174 6:-1 7:1 8:0.480916 9:-1 10:-0.354839 12:-1 13:-1
85
+ -1 1:0.166667 2:1 3:1 4:-0.698113 5:-0.657534 6:-1 7:-1 8:-0.160305 9:1 10:-0.516129 12:-1 13:0.5
86
+ -1 1:-0.458333 2:1 3:-1 4:0.0188679 5:-0.461187 6:-1 7:1 8:0.633588 9:-1 10:-0.741935 11:-1 12:0.333333 13:-1
87
+ -1 1:0.375 2:1 3:-0.333333 4:-0.358491 5:-0.625571 6:1 7:1 8:0.0534351 9:-1 10:-1 11:-1 12:-1 13:-1
88
+ -1 1:0.25 2:1 3:-1 4:0.584906 5:-0.342466 6:-1 7:1 8:0.129771 9:-1 10:0.354839 11:1 12:-1 13:1
89
+ -1 1:-0.5 2:-1 3:-0.333333 4:-0.396226 5:-0.178082 6:-1 7:-1 8:0.40458 9:-1 10:-1 11:-1 12:-1 13:-1
90
+ +1 1:-0.125 2:1 3:1 4:0.0566038 5:-0.465753 6:-1 7:1 8:-0.129771 9:-1 10:-0.16129 12:-1 13:1
91
+ -1 1:0.25 2:1 3:-0.333333 4:-0.132075 5:-0.56621 6:-1 7:-1 8:0.419847 9:1 10:-1 11:-1 12:-1 13:-1
92
+ +1 1:0.333333 2:-1 3:1 4:-0.320755 5:-0.0684932 6:-1 7:1 8:0.496183 9:-1 10:-1 11:-1 12:-1 13:-1
93
+ +1 1:0.0416667 2:1 3:1 4:-0.433962 5:-0.360731 6:-1 7:1 8:-0.419847 9:1 10:-0.290323 12:-0.333333 13:1
94
+ +1 1:0.0416667 2:1 3:1 4:-0.698113 5:-0.634703 6:-1 7:1 8:-0.435115 9:1 10:-1 12:-0.333333 13:-1
95
+ +1 1:-0.0416667 2:1 3:1 4:-0.415094 5:-0.607306 6:-1 7:-1 8:0.480916 9:-1 10:-0.677419 11:-1 12:0.333333 13:1
96
+ +1 1:-0.25 2:1 3:1 4:-0.698113 5:-0.319635 6:-1 7:1 8:-0.282443 9:1 10:-0.677419 12:-0.333333 13:-1
97
+ -1 1:0.541667 2:1 3:1 4:-0.509434 5:-0.196347 6:-1 7:1 8:0.221374 9:-1 10:-0.870968 12:-1 13:-1
98
+ +1 1:0.208333 2:1 3:1 4:-0.886792 5:-0.506849 6:-1 7:-1 8:0.29771 9:-1 10:-0.967742 11:-1 12:-0.333333 13:1
99
+ -1 1:0.458333 2:-1 3:0.333333 4:-0.132075 5:-0.146119 6:-1 7:-1 8:-0.0534351 9:-1 10:-0.935484 11:-1 12:-1 13:1
100
+ -1 1:-0.125 2:-1 3:-0.333333 4:-0.509434 5:-0.461187 6:-1 7:-1 8:0.389313 9:-1 10:-0.645161 11:-1 12:-1 13:-1
101
+ -1 1:-0.375 2:-1 3:0.333333 4:-0.735849 5:-0.931507 6:-1 7:-1 8:0.587786 9:-1 10:-0.806452 12:-1 13:-1
102
+ +1 1:0.583333 2:1 3:1 4:-0.509434 5:-0.493151 6:-1 7:-1 8:-1 9:-1 10:-0.677419 12:-1 13:-1
103
+ -1 1:-0.166667 2:-1 3:1 4:-0.320755 5:-0.347032 6:-1 7:-1 8:0.40458 9:-1 10:-1 11:-1 12:-1 13:-1
104
+ +1 1:0.166667 2:1 3:1 4:0.339623 5:-0.255708 6:1 7:1 8:-0.19084 9:-1 10:-0.677419 12:1 13:1
105
+ +1 1:0.416667 2:1 3:1 4:-0.320755 5:-0.415525 6:-1 7:1 8:0.160305 9:-1 10:-0.548387 12:-0.333333 13:1
106
+ +1 1:-0.208333 2:1 3:1 4:-0.433962 5:-0.324201 6:-1 7:1 8:0.450382 9:-1 10:-0.83871 12:-1 13:1
107
+ -1 1:-0.0833333 2:1 3:0.333333 4:-0.886792 5:-0.561644 6:-1 7:-1 8:0.0992366 9:1 10:-0.612903 12:-1 13:-1
108
+ +1 1:0.291667 2:-1 3:1 4:0.0566038 5:-0.39726 6:-1 7:1 8:0.312977 9:-1 10:-0.16129 12:0.333333 13:1
109
+ +1 1:0.25 2:1 3:1 4:-0.132075 5:-0.767123 6:-1 7:-1 8:0.389313 9:1 10:-1 11:-1 12:-0.333333 13:1
110
+ -1 1:-0.333333 2:-1 3:-0.333333 4:-0.660377 5:-0.844749 6:-1 7:-1 8:0.0229008 9:-1 10:-1 12:-1 13:-1
111
+ +1 1:0.0833333 2:-1 3:1 4:0.622642 5:-0.0821918 6:-1 8:-0.29771 9:1 10:0.0967742 12:-1 13:-1
112
+ -1 1:-0.5 2:1 3:-0.333333 4:-0.698113 5:-0.502283 6:-1 7:-1 8:0.251908 9:-1 10:-1 11:-1 12:-1 13:-1
113
+ +1 1:0.291667 2:-1 3:1 4:0.207547 5:-0.182648 6:-1 7:1 8:0.374046 9:-1 10:-1 11:-1 12:-1 13:-1
114
+ -1 1:0.0416667 2:-1 3:0.333333 4:-0.226415 5:-0.187215 6:1 7:-1 8:0.51145 9:-1 10:-1 11:-1 12:-1 13:-1
115
+ -1 1:-0.458333 2:1 3:-0.333333 4:-0.509434 5:-0.228311 6:-1 7:-1 8:0.389313 9:-1 10:-1 11:-1 12:-1 13:-1
116
+ -1 1:-0.166667 2:-1 3:-0.333333 4:-0.245283 5:-0.3379 6:-1 7:-1 8:0.389313 9:-1 10:-1 12:-1 13:-1
117
+ +1 1:-0.291667 2:1 3:1 4:-0.509434 5:-0.438356 6:-1 7:1 8:0.114504 9:-1 10:-0.741935 11:-1 12:-1 13:1
118
+ +1 1:0.125 2:-1 3:1 4:1 5:-0.260274 6:1 7:1 8:-0.0534351 9:1 10:0.290323 11:1 12:0.333333 13:1
119
+ -1 1:0.541667 2:-1 3:-1 4:0.0566038 5:-0.543379 6:-1 7:-1 8:-0.343511 9:-1 10:-0.16129 11:1 12:-1 13:-1
120
+ +1 1:0.125 2:1 3:1 4:-0.320755 5:-0.283105 6:1 7:1 8:-0.51145 9:1 10:-0.483871 11:1 12:-1 13:1
121
+ +1 1:-0.166667 2:1 3:0.333333 4:-0.509434 5:-0.716895 6:-1 7:-1 8:0.0381679 9:-1 10:-0.354839 12:1 13:1
122
+ +1 1:0.0416667 2:1 3:1 4:-0.471698 5:-0.269406 6:-1 7:1 8:-0.312977 9:1 10:0.0322581 12:0.333333 13:-1
123
+ +1 1:0.166667 2:1 3:1 4:0.0943396 5:-0.324201 6:-1 7:-1 8:-0.740458 9:1 10:-0.612903 12:-0.333333 13:1
124
+ -1 1:0.5 2:-1 3:0.333333 4:0.245283 5:0.0684932 6:-1 7:1 8:0.221374 9:-1 10:-0.741935 11:-1 12:-1 13:-1
125
+ -1 1:0.0416667 2:1 3:0.333333 4:-0.415094 5:-0.328767 6:-1 7:1 8:0.236641 9:-1 10:-0.83871 11:1 12:-0.333333 13:-1
126
+ -1 1:0.0416667 2:-1 3:0.333333 4:0.245283 5:-0.657534 6:-1 7:-1 8:0.40458 9:-1 10:-1 11:-1 12:-0.333333 13:-1
127
+ +1 1:0.375 2:1 3:1 4:-0.509434 5:-0.356164 6:-1 7:-1 8:-0.572519 9:1 10:-0.419355 12:0.333333 13:1
128
+ -1 1:-0.0416667 2:-1 3:0.333333 4:-0.207547 5:-0.680365 6:-1 7:1 8:0.496183 9:-1 10:-0.967742 12:-1 13:-1
129
+ -1 1:-0.0416667 2:1 3:-0.333333 4:-0.245283 5:-0.657534 6:-1 7:-1 8:0.328244 9:-1 10:-0.741935 11:-1 12:-0.333333 13:-1
130
+ +1 1:0.291667 2:1 3:1 4:-0.566038 5:-0.525114 6:1 7:-1 8:0.358779 9:1 10:-0.548387 11:-1 12:0.333333 13:1
131
+ +1 1:0.416667 2:-1 3:1 4:-0.735849 5:-0.347032 6:-1 7:-1 8:0.496183 9:1 10:-0.419355 12:0.333333 13:-1
132
+ +1 1:0.541667 2:1 3:1 4:-0.660377 5:-0.607306 6:-1 7:1 8:-0.0687023 9:1 10:-0.967742 11:-1 12:-0.333333 13:-1
133
+ -1 1:-0.458333 2:1 3:1 4:-0.132075 5:-0.543379 6:-1 7:-1 8:0.633588 9:-1 10:-1 11:-1 12:-1 13:-1
134
+ +1 1:0.458333 2:1 3:1 4:-0.509434 5:-0.452055 6:-1 7:1 8:-0.618321 9:1 10:-0.290323 11:1 12:-0.333333 13:-1
135
+ -1 1:0.0416667 2:1 3:0.333333 4:0.0566038 5:-0.515982 6:-1 7:1 8:0.435115 9:-1 10:-0.483871 11:-1 12:-1 13:1
136
+ -1 1:-0.291667 2:-1 3:0.333333 4:-0.0943396 5:-0.767123 6:-1 7:1 8:0.358779 9:1 10:-0.548387 11:1 12:-1 13:-1
137
+ -1 1:0.583333 2:-1 3:0.333333 4:0.0943396 5:-0.310502 6:-1 7:-1 8:0.541985 9:-1 10:-1 11:-1 12:-0.333333 13:-1
138
+ +1 1:0.125 2:1 3:1 4:-0.415094 5:-0.438356 6:1 7:1 8:0.114504 9:1 10:-0.612903 12:-0.333333 13:-1
139
+ -1 1:-0.791667 2:-1 3:-0.333333 4:-0.54717 5:-0.616438 6:-1 7:-1 8:0.847328 9:-1 10:-0.774194 11:-1 12:-1 13:-1
140
+ -1 1:0.166667 2:1 3:1 4:-0.283019 5:-0.630137 6:-1 7:-1 8:0.480916 9:1 10:-1 11:-1 12:-1 13:1
141
+ +1 1:0.458333 2:1 3:1 4:-0.0377358 5:-0.607306 6:-1 7:1 8:-0.0687023 9:-1 10:-0.354839 12:0.333333 13:0.5
142
+ -1 1:0.25 2:1 3:1 4:-0.169811 5:-0.3379 6:-1 7:1 8:0.694656 9:-1 10:-1 11:-1 12:-1 13:-1
143
+ +1 1:-0.125 2:1 3:0.333333 4:-0.132075 5:-0.511416 6:-1 7:-1 8:0.40458 9:-1 10:-0.806452 12:-0.333333 13:1
144
+ -1 1:-0.0833333 2:1 3:-1 4:-0.415094 5:-0.60274 6:-1 7:1 8:-0.175573 9:1 10:-0.548387 11:-1 12:-0.333333 13:-1
145
+ +1 1:0.0416667 2:1 3:-0.333333 4:0.849057 5:-0.283105 6:-1 7:1 8:0.89313 9:-1 10:-1 11:-1 12:-0.333333 13:1
146
+ +1 2:1 3:1 4:-0.45283 5:-0.287671 6:-1 7:-1 8:-0.633588 9:1 10:-0.354839 12:0.333333 13:1
147
+ +1 1:-0.0416667 2:1 3:1 4:-0.660377 5:-0.525114 6:-1 7:-1 8:0.358779 9:-1 10:-1 11:-1 12:-0.333333 13:-1
148
+ +1 1:-0.541667 2:1 3:1 4:-0.698113 5:-0.812785 6:-1 7:1 8:-0.343511 9:1 10:-0.354839 12:-1 13:1
149
+ +1 1:0.208333 2:1 3:0.333333 4:-0.283019 5:-0.552511 6:-1 7:1 8:0.557252 9:-1 10:0.0322581 11:-1 12:0.333333 13:1
150
+ -1 1:-0.5 2:-1 3:0.333333 4:-0.660377 5:-0.351598 6:-1 7:1 8:0.541985 9:1 10:-1 11:-1 12:-1 13:-1
151
+ -1 1:-0.5 2:1 3:0.333333 4:-0.660377 5:-0.43379 6:-1 7:-1 8:0.648855 9:-1 10:-1 11:-1 12:-1 13:-1
152
+ -1 1:-0.125 2:-1 3:0.333333 4:-0.509434 5:-0.575342 6:-1 7:-1 8:0.328244 9:-1 10:-0.483871 12:-1 13:-1
153
+ -1 1:0.0416667 2:-1 3:0.333333 4:-0.735849 5:-0.356164 6:-1 7:1 8:0.465649 9:-1 10:-1 11:-1 12:-1 13:-1
154
+ -1 1:0.458333 2:-1 3:1 4:-0.320755 5:-0.191781 6:-1 7:-1 8:-0.221374 9:-1 10:-0.354839 12:0.333333 13:-1
155
+ -1 1:-0.0833333 2:-1 3:0.333333 4:-0.320755 5:-0.406393 6:-1 7:1 8:0.19084 9:-1 10:-0.83871 11:-1 12:-1 13:-1
156
+ -1 1:-0.291667 2:-1 3:-0.333333 4:-0.792453 5:-0.643836 6:-1 7:-1 8:0.541985 9:-1 10:-1 11:-1 12:-1 13:-1
157
+ +1 1:0.0833333 2:1 3:1 4:-0.132075 5:-0.584475 6:-1 7:-1 8:-0.389313 9:1 10:0.806452 11:1 12:-1 13:1
158
+ -1 1:-0.333333 2:1 3:-0.333333 4:-0.358491 5:-0.16895 6:-1 7:1 8:0.51145 9:-1 10:-1 11:-1 12:-1 13:-1
159
+ -1 1:0.125 2:1 3:-1 4:-0.509434 5:-0.694064 6:-1 7:1 8:0.389313 9:-1 10:-0.387097 12:-1 13:1
160
+ +1 1:0.541667 2:-1 3:1 4:0.584906 5:-0.534247 6:1 7:-1 8:0.435115 9:1 10:-0.677419 12:0.333333 13:1
161
+ +1 1:-0.625 2:1 3:-1 4:-0.509434 5:-0.520548 6:-1 7:-1 8:0.694656 9:1 10:0.225806 12:-1 13:1
162
+ +1 1:0.375 2:-1 3:1 4:0.0566038 5:-0.461187 6:-1 7:-1 8:0.267176 9:1 10:-0.548387 12:-1 13:-1
163
+ -1 1:0.0833333 2:1 3:-0.333333 4:-0.320755 5:-0.378995 6:-1 7:-1 8:0.282443 9:-1 10:-1 11:-1 12:-1 13:-1
164
+ +1 1:0.208333 2:1 3:1 4:-0.358491 5:-0.392694 6:-1 7:1 8:-0.0992366 9:1 10:-0.0322581 12:0.333333 13:1
165
+ -1 1:-0.416667 2:1 3:1 4:-0.698113 5:-0.611872 6:-1 7:-1 8:0.374046 9:-1 10:-1 11:-1 12:-1 13:1
166
+ -1 1:0.458333 2:-1 3:1 4:0.622642 5:-0.0913242 6:-1 7:-1 8:0.267176 9:1 10:-1 11:-1 12:-1 13:-1
167
+ -1 1:-0.125 2:-1 3:1 4:-0.698113 5:-0.415525 6:-1 7:1 8:0.343511 9:-1 10:-1 11:-1 12:-1 13:-1
168
+ -1 2:1 3:0.333333 4:-0.320755 5:-0.675799 6:1 7:1 8:0.236641 9:-1 10:-0.612903 11:1 12:-1 13:-1
169
+ -1 1:-0.333333 2:-1 3:1 4:-0.169811 5:-0.497717 6:-1 7:1 8:0.236641 9:1 10:-0.935484 12:-1 13:-1
170
+ +1 1:0.5 2:1 3:-1 4:-0.169811 5:-0.287671 6:1 7:1 8:0.572519 9:-1 10:-0.548387 12:-0.333333 13:-1
171
+ -1 1:0.666667 2:1 3:-1 4:0.245283 5:-0.506849 6:1 7:1 8:-0.0839695 9:-1 10:-0.967742 12:-0.333333 13:-1
172
+ +1 1:0.666667 2:1 3:0.333333 4:-0.132075 5:-0.415525 6:-1 7:1 8:0.145038 9:-1 10:-0.354839 12:1 13:1
173
+ +1 1:0.583333 2:1 3:1 4:-0.886792 5:-0.210046 6:-1 7:1 8:-0.175573 9:1 10:-0.709677 12:0.333333 13:-1
174
+ -1 1:0.625 2:-1 3:0.333333 4:-0.509434 5:-0.611872 6:-1 7:1 8:-0.328244 9:-1 10:-0.516129 12:-1 13:-1
175
+ -1 1:-0.791667 2:1 3:-1 4:-0.54717 5:-0.744292 6:-1 7:1 8:0.572519 9:-1 10:-1 11:-1 12:-1 13:-1
176
+ +1 1:0.375 2:-1 3:1 4:-0.169811 5:-0.232877 6:1 7:-1 8:-0.465649 9:-1 10:-0.387097 12:1 13:-1
177
+ +1 1:-0.0833333 2:1 3:1 4:-0.132075 5:-0.214612 6:-1 7:-1 8:-0.221374 9:1 10:0.354839 12:1 13:1
178
+ +1 1:-0.291667 2:1 3:0.333333 4:0.0566038 5:-0.520548 6:-1 7:-1 8:0.160305 9:-1 10:0.16129 12:-1 13:-1
179
+ +1 1:0.583333 2:1 3:1 4:-0.415094 5:-0.415525 6:1 7:-1 8:0.40458 9:-1 10:-0.935484 12:0.333333 13:1
180
+ -1 1:-0.125 2:1 3:0.333333 4:-0.339623 5:-0.680365 6:-1 7:-1 8:0.40458 9:-1 10:-1 11:-1 12:-1 13:-1
181
+ -1 1:-0.458333 2:1 3:0.333333 4:-0.509434 5:-0.479452 6:1 7:-1 8:0.877863 9:-1 10:-0.741935 11:1 12:-1 13:1
182
+ +1 1:0.125 2:-1 3:1 4:-0.245283 5:0.292237 6:-1 7:1 8:0.206107 9:1 10:-0.387097 12:0.333333 13:1
183
+ +1 1:-0.5 2:1 3:1 4:-0.698113 5:-0.789954 6:-1 7:1 8:0.328244 9:-1 10:-1 11:-1 12:-1 13:1
184
+ -1 1:-0.458333 2:-1 3:1 4:-0.849057 5:-0.365297 6:-1 7:1 8:-0.221374 9:-1 10:-0.806452 12:-1 13:-1
185
+ -1 2:1 3:0.333333 4:-0.320755 5:-0.452055 6:1 7:1 8:0.557252 9:-1 10:-1 11:-1 12:1 13:-1
186
+ -1 1:-0.416667 2:1 3:0.333333 4:-0.320755 5:-0.136986 6:-1 7:-1 8:0.389313 9:-1 10:-0.387097 11:-1 12:-0.333333 13:-1
187
+ +1 1:0.125 2:1 3:1 4:-0.283019 5:-0.73516 6:-1 7:1 8:-0.480916 9:1 10:-0.322581 12:-0.333333 13:0.5
188
+ -1 1:-0.0416667 2:1 3:1 4:-0.735849 5:-0.511416 6:1 7:-1 8:0.160305 9:-1 10:-0.967742 11:-1 12:1 13:1
189
+ -1 1:0.375 2:-1 3:1 4:-0.132075 5:0.223744 6:-1 7:1 8:0.312977 9:-1 10:-0.612903 12:-1 13:-1
190
+ +1 1:0.708333 2:1 3:0.333333 4:0.245283 5:-0.347032 6:-1 7:-1 8:-0.374046 9:1 10:-0.0645161 12:-0.333333 13:1
191
+ -1 1:0.0416667 2:1 3:1 4:-0.132075 5:-0.484018 6:-1 7:-1 8:0.358779 9:-1 10:-0.612903 11:-1 12:-1 13:-1
192
+ +1 1:0.708333 2:1 3:1 4:-0.0377358 5:-0.780822 6:-1 7:-1 8:-0.175573 9:1 10:-0.16129 11:1 12:-1 13:1
193
+ -1 1:0.0416667 2:1 3:-0.333333 4:-0.735849 5:-0.164384 6:-1 7:-1 8:0.29771 9:-1 10:-1 11:-1 12:-1 13:1
194
+ +1 1:-0.75 2:1 3:1 4:-0.396226 5:-0.287671 6:-1 7:1 8:0.29771 9:1 10:-1 11:-1 12:-1 13:1
195
+ -1 1:-0.208333 2:1 3:0.333333 4:-0.433962 5:-0.410959 6:1 7:-1 8:0.587786 9:-1 10:-1 11:-1 12:0.333333 13:-1
196
+ -1 1:0.0833333 2:-1 3:-0.333333 4:-0.226415 5:-0.43379 6:-1 7:1 8:0.374046 9:-1 10:-0.548387 12:-1 13:-1
197
+ -1 1:0.208333 2:-1 3:1 4:-0.886792 5:-0.442922 6:-1 7:1 8:-0.221374 9:-1 10:-0.677419 12:-1 13:-1
198
+ -1 1:0.0416667 2:-1 3:0.333333 4:-0.698113 5:-0.598174 6:-1 7:-1 8:0.328244 9:-1 10:-0.483871 12:-1 13:-1
199
+ -1 1:0.666667 2:-1 3:-1 4:-0.132075 5:-0.484018 6:-1 7:-1 8:0.221374 9:-1 10:-0.419355 11:-1 12:0.333333 13:-1
200
+ +1 1:1 2:1 3:1 4:-0.415094 5:-0.187215 6:-1 7:1 8:0.389313 9:1 10:-1 11:-1 12:1 13:-1
201
+ -1 1:0.625 2:1 3:0.333333 4:-0.54717 5:-0.310502 6:-1 7:-1 8:0.221374 9:-1 10:-0.677419 11:-1 12:-0.333333 13:1
202
+ +1 1:0.208333 2:1 3:1 4:-0.415094 5:-0.205479 6:-1 7:1 8:0.526718 9:-1 10:-1 11:-1 12:0.333333 13:1
203
+ +1 1:0.291667 2:1 3:1 4:-0.415094 5:-0.39726 6:-1 7:1 8:0.0687023 9:1 10:-0.0967742 12:-0.333333 13:1
204
+ +1 1:-0.0833333 2:1 3:1 4:-0.132075 5:-0.210046 6:-1 7:-1 8:0.557252 9:1 10:-0.483871 11:-1 12:-1 13:1
205
+ +1 1:0.0833333 2:1 3:1 4:0.245283 5:-0.255708 6:-1 7:1 8:0.129771 9:1 10:-0.741935 12:-0.333333 13:1
206
+ -1 1:-0.0416667 2:1 3:-1 4:0.0943396 5:-0.214612 6:1 7:-1 8:0.633588 9:-1 10:-0.612903 12:-1 13:1
207
+ -1 1:0.291667 2:-1 3:0.333333 4:-0.849057 5:-0.123288 6:-1 7:-1 8:0.358779 9:-1 10:-1 11:-1 12:-0.333333 13:-1
208
+ -1 1:0.208333 2:1 3:0.333333 4:-0.792453 5:-0.479452 6:-1 7:1 8:0.267176 9:1 10:-0.806452 12:-1 13:1
209
+ +1 1:0.458333 2:1 3:0.333333 4:-0.415094 5:-0.164384 6:-1 7:-1 8:-0.0839695 9:1 10:-0.419355 12:-1 13:1
210
+ -1 1:-0.666667 2:1 3:0.333333 4:-0.320755 5:-0.43379 6:-1 7:-1 8:0.770992 9:-1 10:0.129032 11:1 12:-1 13:-1
211
+ +1 1:0.25 2:1 3:-1 4:0.433962 5:-0.260274 6:-1 7:1 8:0.343511 9:-1 10:-0.935484 12:-1 13:1
212
+ -1 1:-0.0833333 2:1 3:0.333333 4:-0.415094 5:-0.456621 6:1 7:1 8:0.450382 9:-1 10:-0.225806 12:-1 13:-1
213
+ -1 1:-0.416667 2:-1 3:0.333333 4:-0.471698 5:-0.60274 6:-1 7:-1 8:0.435115 9:-1 10:-0.935484 12:-1 13:-1
214
+ +1 1:0.208333 2:1 3:1 4:-0.358491 5:-0.589041 6:-1 7:1 8:-0.0839695 9:1 10:-0.290323 12:1 13:1
215
+ -1 1:-1 2:1 3:-0.333333 4:-0.320755 5:-0.643836 6:-1 7:1 8:1 9:-1 10:-1 11:-1 12:-1 13:-1
216
+ -1 1:-0.5 2:-1 3:-0.333333 4:-0.320755 5:-0.643836 6:-1 7:1 8:0.541985 9:-1 10:-0.548387 11:-1 12:-1 13:-1
217
+ -1 1:0.416667 2:-1 3:0.333333 4:-0.226415 5:-0.424658 6:-1 7:1 8:0.541985 9:-1 10:-1 11:-1 12:-1 13:-1
218
+ -1 1:-0.0833333 2:1 3:0.333333 4:-1 5:-0.538813 6:-1 7:-1 8:0.267176 9:1 10:-1 11:-1 12:-0.333333 13:1
219
+ -1 1:0.0416667 2:1 3:0.333333 4:-0.509434 5:-0.39726 6:-1 7:1 8:0.160305 9:-1 10:-0.870968 12:-1 13:1
220
+ -1 1:-0.375 2:1 3:-0.333333 4:-0.509434 5:-0.570776 6:-1 7:-1 8:0.51145 9:-1 10:-1 11:-1 12:-1 13:-1
221
+ +1 1:0.0416667 2:1 3:1 4:-0.698113 5:-0.484018 6:-1 7:-1 8:-0.160305 9:1 10:-0.0967742 12:-0.333333 13:1
222
+ +1 1:0.5 2:1 3:1 4:-0.226415 5:-0.415525 6:-1 7:1 8:-0.145038 9:-1 10:-0.0967742 12:-0.333333 13:1
223
+ -1 1:0.166667 2:1 3:0.333333 4:0.0566038 5:-0.808219 6:-1 7:-1 8:0.572519 9:-1 10:-0.483871 11:-1 12:-1 13:-1
224
+ +1 1:0.416667 2:1 3:1 4:-0.320755 5:-0.0684932 6:1 7:1 8:-0.0687023 9:1 10:-0.419355 11:-1 12:1 13:1
225
+ -1 1:-0.75 2:-1 3:1 4:-0.169811 5:-0.739726 6:-1 7:-1 8:0.694656 9:-1 10:-0.548387 11:-1 12:-1 13:-1
226
+ -1 1:-0.5 2:1 3:-0.333333 4:-0.226415 5:-0.648402 6:-1 7:-1 8:-0.0687023 9:-1 10:-1 12:-1 13:0.5
227
+ +1 1:0.375 2:-1 3:0.333333 4:-0.320755 5:-0.374429 6:-1 7:-1 8:-0.603053 9:-1 10:-0.612903 12:-0.333333 13:1
228
+ +1 1:-0.416667 2:-1 3:1 4:-0.283019 5:-0.0182648 6:1 7:1 8:-0.00763359 9:1 10:-0.0322581 12:-1 13:1
229
+ -1 1:0.208333 2:-1 3:-1 4:0.0566038 5:-0.283105 6:1 7:1 8:0.389313 9:-1 10:-0.677419 11:-1 12:-1 13:-1
230
+ -1 1:-0.0416667 2:1 3:-1 4:-0.54717 5:-0.726027 6:-1 7:1 8:0.816794 9:-1 10:-1 12:-1 13:0.5
231
+ +1 1:0.333333 2:-1 3:1 4:-0.0377358 5:-0.173516 6:-1 7:1 8:0.145038 9:1 10:-0.677419 12:-1 13:1
232
+ +1 1:-0.583333 2:1 3:1 4:-0.54717 5:-0.575342 6:-1 7:-1 8:0.0534351 9:-1 10:-0.612903 12:-1 13:1
233
+ -1 1:-0.333333 2:1 3:1 4:-0.603774 5:-0.388128 6:-1 7:1 8:0.740458 9:-1 10:-1 11:-1 12:-1 13:-1
234
+ +1 1:-0.0416667 2:1 3:1 4:-0.358491 5:-0.410959 6:-1 7:-1 8:0.374046 9:1 10:-1 11:-1 12:-0.333333 13:1
235
+ -1 1:0.375 2:1 3:0.333333 4:-0.320755 5:-0.520548 6:-1 7:-1 8:0.145038 9:-1 10:-0.419355 12:1 13:1
236
+ +1 1:0.375 2:-1 3:1 4:0.245283 5:-0.826484 6:-1 7:1 8:0.129771 9:-1 10:1 11:1 12:1 13:1
237
+ -1 2:-1 3:1 4:-0.169811 5:-0.506849 6:-1 7:1 8:0.358779 9:-1 10:-1 11:-1 12:-1 13:-1
238
+ +1 1:-0.416667 2:1 3:1 4:-0.509434 5:-0.767123 6:-1 7:1 8:-0.251908 9:1 10:-0.193548 12:-1 13:1
239
+ -1 1:-0.25 2:1 3:0.333333 4:-0.169811 5:-0.401826 6:-1 7:1 8:0.29771 9:-1 10:-1 11:-1 12:-1 13:-1
240
+ -1 1:-0.0416667 2:1 3:-0.333333 4:-0.509434 5:-0.0913242 6:-1 7:-1 8:0.541985 9:-1 10:-0.935484 11:-1 12:-1 13:-1
241
+ +1 1:0.625 2:1 3:0.333333 4:0.622642 5:-0.324201 6:1 7:1 8:0.206107 9:1 10:-0.483871 12:-1 13:1
242
+ -1 1:-0.583333 2:1 3:0.333333 4:-0.132075 5:-0.109589 6:-1 7:1 8:0.694656 9:-1 10:-1 11:-1 12:-1 13:-1
243
+ -1 2:-1 3:1 4:-0.320755 5:-0.369863 6:-1 7:1 8:0.0992366 9:-1 10:-0.870968 12:-1 13:-1
244
+ +1 1:0.375 2:-1 3:1 4:-0.132075 5:-0.351598 6:-1 7:1 8:0.358779 9:-1 10:0.16129 11:1 12:0.333333 13:-1
245
+ -1 1:-0.0833333 2:-1 3:0.333333 4:-0.132075 5:-0.16895 6:-1 7:1 8:0.0839695 9:-1 10:-0.516129 11:-1 12:-0.333333 13:-1
246
+ +1 1:0.291667 2:1 3:1 4:-0.320755 5:-0.420091 6:-1 7:-1 8:0.114504 9:1 10:-0.548387 11:-1 12:-0.333333 13:1
247
+ +1 1:0.5 2:1 3:1 4:-0.698113 5:-0.442922 6:-1 7:1 8:0.328244 9:-1 10:-0.806452 11:-1 12:0.333333 13:0.5
248
+ -1 1:0.5 2:-1 3:0.333333 4:0.150943 5:-0.347032 6:-1 7:-1 8:0.175573 9:-1 10:-0.741935 11:-1 12:-1 13:-1
249
+ +1 1:0.291667 2:1 3:0.333333 4:-0.132075 5:-0.730594 6:-1 7:1 8:0.282443 9:-1 10:-0.0322581 12:-1 13:-1
250
+ +1 1:0.291667 2:1 3:1 4:-0.0377358 5:-0.287671 6:-1 7:1 8:0.0839695 9:1 10:-0.0967742 12:0.333333 13:1
251
+ +1 1:0.0416667 2:1 3:1 4:-0.509434 5:-0.716895 6:-1 7:-1 8:-0.358779 9:-1 10:-0.548387 12:-0.333333 13:1
252
+ -1 1:-0.375 2:1 3:-0.333333 4:-0.320755 5:-0.575342 6:-1 7:1 8:0.78626 9:-1 10:-1 11:-1 12:-1 13:-1
253
+ +1 1:-0.375 2:1 3:1 4:-0.660377 5:-0.251142 6:-1 7:1 8:0.251908 9:-1 10:-1 11:-1 12:-0.333333 13:-1
254
+ -1 1:-0.0833333 2:1 3:0.333333 4:-0.698113 5:-0.776256 6:-1 7:-1 8:-0.206107 9:-1 10:-0.806452 11:-1 12:-1 13:-1
255
+ -1 1:0.25 2:1 3:0.333333 4:0.0566038 5:-0.607306 6:1 7:-1 8:0.312977 9:-1 10:-0.483871 11:-1 12:-1 13:-1
256
+ -1 1:0.75 2:-1 3:-0.333333 4:0.245283 5:-0.196347 6:-1 7:-1 8:0.389313 9:-1 10:-0.870968 11:-1 12:0.333333 13:-1
257
+ -1 1:0.333333 2:1 3:0.333333 4:0.0566038 5:-0.465753 6:1 7:-1 8:0.00763359 9:1 10:-0.677419 12:-1 13:-1
258
+ +1 1:0.0833333 2:1 3:1 4:-0.283019 5:0.0365297 6:-1 7:-1 8:-0.0687023 9:1 10:-0.612903 12:-0.333333 13:1
259
+ +1 1:0.458333 2:1 3:0.333333 4:-0.132075 5:-0.0456621 6:-1 7:-1 8:0.328244 9:-1 10:-1 11:-1 12:-1 13:-1
260
+ -1 1:-0.416667 2:1 3:1 4:0.0566038 5:-0.447489 6:-1 7:-1 8:0.526718 9:-1 10:-0.516129 11:-1 12:-1 13:-1
261
+ -1 1:0.208333 2:-1 3:0.333333 4:-0.509434 5:-0.0228311 6:-1 7:-1 8:0.541985 9:-1 10:-1 11:-1 12:-1 13:-1
262
+ +1 1:0.291667 2:1 3:1 4:-0.320755 5:-0.634703 6:-1 7:1 8:-0.0687023 9:1 10:-0.225806 12:0.333333 13:1
263
+ +1 1:0.208333 2:1 3:-0.333333 4:-0.509434 5:-0.278539 6:-1 7:1 8:0.358779 9:-1 10:-0.419355 12:-1 13:-1
264
+ -1 1:-0.166667 2:1 3:-0.333333 4:-0.320755 5:-0.360731 6:-1 7:-1 8:0.526718 9:-1 10:-0.806452 11:-1 12:-1 13:-1
265
+ +1 1:-0.208333 2:1 3:-0.333333 4:-0.698113 5:-0.52968 6:-1 7:-1 8:0.480916 9:-1 10:-0.677419 11:1 12:-1 13:1
266
+ -1 1:-0.0416667 2:1 3:0.333333 4:0.471698 5:-0.666667 6:1 7:-1 8:0.389313 9:-1 10:-0.83871 11:-1 12:-1 13:1
267
+ -1 1:-0.375 2:1 3:-0.333333 4:-0.509434 5:-0.374429 6:-1 7:-1 8:0.557252 9:-1 10:-1 11:-1 12:-1 13:1
268
+ -1 1:0.125 2:-1 3:-0.333333 4:-0.132075 5:-0.232877 6:-1 7:1 8:0.251908 9:-1 10:-0.580645 12:-1 13:-1
269
+ -1 1:0.166667 2:1 3:1 4:-0.132075 5:-0.69863 6:-1 7:-1 8:0.175573 9:-1 10:-0.870968 12:-1 13:0.5
270
+ +1 1:0.583333 2:1 3:1 4:0.245283 5:-0.269406 6:-1 7:1 8:-0.435115 9:1 10:-0.516129 12:1 13:-1
@@ -0,0 +1,25 @@
1
+ .SUFFIXES: .class .java
2
+ FILES = libsvm/svm.class libsvm/svm_model.class libsvm/svm_node.class \
3
+ libsvm/svm_parameter.class libsvm/svm_problem.class \
4
+ libsvm/svm_print_interface.class \
5
+ svm_train.class svm_predict.class svm_toy.class svm_scale.class
6
+
7
+ #JAVAC = jikes
8
+ JAVAC_FLAGS = -target 1.5 -source 1.5
9
+ JAVAC = javac
10
+ # JAVAC_FLAGS =
11
+
12
+ all: $(FILES)
13
+ jar cvf libsvm.jar *.class libsvm/*.class
14
+
15
+ .java.class:
16
+ $(JAVAC) $(JAVAC_FLAGS) $<
17
+
18
+ libsvm/svm.java: libsvm/svm.m4
19
+ m4 libsvm/svm.m4 > libsvm/svm.java
20
+
21
+ clean:
22
+ rm -f libsvm/*.class *.class *.jar libsvm/*~ *~ libsvm/svm.java
23
+
24
+ dist: clean all
25
+ rm *.class libsvm/*.class
Binary file
@@ -0,0 +1,2776 @@
1
+
2
+
3
+
4
+
5
+ package libsvm;
6
+ import java.io.*;
7
+ import java.util.*;
8
+
9
+ //
10
+ // Kernel Cache
11
+ //
12
+ // l is the number of total data items
13
+ // size is the cache size limit in bytes
14
+ //
15
+ class Cache {
16
+ private final int l;
17
+ private long size;
18
+ private final class head_t
19
+ {
20
+ head_t prev, next; // a cicular list
21
+ float[] data;
22
+ int len; // data[0,len) is cached in this entry
23
+ }
24
+ private final head_t[] head;
25
+ private head_t lru_head;
26
+
27
+ Cache(int l_, long size_)
28
+ {
29
+ l = l_;
30
+ size = size_;
31
+ head = new head_t[l];
32
+ for(int i=0;i<l;i++) head[i] = new head_t();
33
+ size /= 4;
34
+ size -= l * (16/4); // sizeof(head_t) == 16
35
+ size = Math.max(size, 2* (long) l); // cache must be large enough for two columns
36
+ lru_head = new head_t();
37
+ lru_head.next = lru_head.prev = lru_head;
38
+ }
39
+
40
+ private void lru_delete(head_t h)
41
+ {
42
+ // delete from current location
43
+ h.prev.next = h.next;
44
+ h.next.prev = h.prev;
45
+ }
46
+
47
+ private void lru_insert(head_t h)
48
+ {
49
+ // insert to last position
50
+ h.next = lru_head;
51
+ h.prev = lru_head.prev;
52
+ h.prev.next = h;
53
+ h.next.prev = h;
54
+ }
55
+
56
+ // request data [0,len)
57
+ // return some position p where [p,len) need to be filled
58
+ // (p >= len if nothing needs to be filled)
59
+ // java: simulate pointer using single-element array
60
+ int get_data(int index, float[][] data, int len)
61
+ {
62
+ head_t h = head[index];
63
+ if(h.len > 0) lru_delete(h);
64
+ int more = len - h.len;
65
+
66
+ if(more > 0)
67
+ {
68
+ // free old space
69
+ while(size < more)
70
+ {
71
+ head_t old = lru_head.next;
72
+ lru_delete(old);
73
+ size += old.len;
74
+ old.data = null;
75
+ old.len = 0;
76
+ }
77
+
78
+ // allocate new space
79
+ float[] new_data = new float[len];
80
+ if(h.data != null) System.arraycopy(h.data,0,new_data,0,h.len);
81
+ h.data = new_data;
82
+ size -= more;
83
+ do {int _=h.len; h.len=len; len=_;} while(false);
84
+ }
85
+
86
+ lru_insert(h);
87
+ data[0] = h.data;
88
+ return len;
89
+ }
90
+
91
+ void swap_index(int i, int j)
92
+ {
93
+ if(i==j) return;
94
+
95
+ if(head[i].len > 0) lru_delete(head[i]);
96
+ if(head[j].len > 0) lru_delete(head[j]);
97
+ do {float[] _=head[i].data; head[i].data=head[j].data; head[j].data=_;} while(false);
98
+ do {int _=head[i].len; head[i].len=head[j].len; head[j].len=_;} while(false);
99
+ if(head[i].len > 0) lru_insert(head[i]);
100
+ if(head[j].len > 0) lru_insert(head[j]);
101
+
102
+ if(i>j) do {int _=i; i=j; j=_;} while(false);
103
+ for(head_t h = lru_head.next; h!=lru_head; h=h.next)
104
+ {
105
+ if(h.len > i)
106
+ {
107
+ if(h.len > j)
108
+ do {float _=h.data[i]; h.data[i]=h.data[j]; h.data[j]=_;} while(false);
109
+ else
110
+ {
111
+ // give up
112
+ lru_delete(h);
113
+ size += h.len;
114
+ h.data = null;
115
+ h.len = 0;
116
+ }
117
+ }
118
+ }
119
+ }
120
+ }
121
+
122
+ //
123
+ // Kernel evaluation
124
+ //
125
+ // the static method k_function is for doing single kernel evaluation
126
+ // the constructor of Kernel prepares to calculate the l*l kernel matrix
127
+ // the member function get_Q is for getting one column from the Q Matrix
128
+ //
129
+ abstract class QMatrix {
130
+ abstract float[] get_Q(int column, int len);
131
+ abstract double[] get_QD();
132
+ abstract void swap_index(int i, int j);
133
+ };
134
+
135
+ abstract class Kernel extends QMatrix {
136
+ private svm_node[][] x;
137
+ private final double[] x_square;
138
+
139
+ // svm_parameter
140
+ private final int kernel_type;
141
+ private final int degree;
142
+ private final double gamma;
143
+ private final double coef0;
144
+
145
+ abstract float[] get_Q(int column, int len);
146
+ abstract double[] get_QD();
147
+
148
+ void swap_index(int i, int j)
149
+ {
150
+ do {svm_node[] _=x[i]; x[i]=x[j]; x[j]=_;} while(false);
151
+ if(x_square != null) do {double _=x_square[i]; x_square[i]=x_square[j]; x_square[j]=_;} while(false);
152
+ }
153
+
154
+ private static double powi(double base, int times)
155
+ {
156
+ double tmp = base, ret = 1.0;
157
+
158
+ for(int t=times; t>0; t/=2)
159
+ {
160
+ if(t%2==1) ret*=tmp;
161
+ tmp = tmp * tmp;
162
+ }
163
+ return ret;
164
+ }
165
+
166
+ double kernel_function(int i, int j)
167
+ {
168
+ switch(kernel_type)
169
+ {
170
+ case svm_parameter.LINEAR:
171
+ return dot(x[i],x[j]);
172
+ case svm_parameter.POLY:
173
+ return powi(gamma*dot(x[i],x[j])+coef0,degree);
174
+ case svm_parameter.RBF:
175
+ return Math.exp(-gamma*(x_square[i]+x_square[j]-2*dot(x[i],x[j])));
176
+ case svm_parameter.SIGMOID:
177
+ return Math.tanh(gamma*dot(x[i],x[j])+coef0);
178
+ case svm_parameter.PRECOMPUTED:
179
+ return x[i][(int)(x[j][0].value)].value;
180
+ default:
181
+ return 0; // java
182
+ }
183
+ }
184
+
185
+ Kernel(int l, svm_node[][] x_, svm_parameter param)
186
+ {
187
+ this.kernel_type = param.kernel_type;
188
+ this.degree = param.degree;
189
+ this.gamma = param.gamma;
190
+ this.coef0 = param.coef0;
191
+
192
+ x = (svm_node[][])x_.clone();
193
+
194
+ if(kernel_type == svm_parameter.RBF)
195
+ {
196
+ x_square = new double[l];
197
+ for(int i=0;i<l;i++)
198
+ x_square[i] = dot(x[i],x[i]);
199
+ }
200
+ else x_square = null;
201
+ }
202
+
203
+ static double dot(svm_node[] x, svm_node[] y)
204
+ {
205
+ double sum = 0;
206
+ int xlen = x.length;
207
+ int ylen = y.length;
208
+ int i = 0;
209
+ int j = 0;
210
+ while(i < xlen && j < ylen)
211
+ {
212
+ if(x[i].index == y[j].index)
213
+ sum += x[i++].value * y[j++].value;
214
+ else
215
+ {
216
+ if(x[i].index > y[j].index)
217
+ ++j;
218
+ else
219
+ ++i;
220
+ }
221
+ }
222
+ return sum;
223
+ }
224
+
225
+ static double k_function(svm_node[] x, svm_node[] y,
226
+ svm_parameter param)
227
+ {
228
+ switch(param.kernel_type)
229
+ {
230
+ case svm_parameter.LINEAR:
231
+ return dot(x,y);
232
+ case svm_parameter.POLY:
233
+ return powi(param.gamma*dot(x,y)+param.coef0,param.degree);
234
+ case svm_parameter.RBF:
235
+ {
236
+ double sum = 0;
237
+ int xlen = x.length;
238
+ int ylen = y.length;
239
+ int i = 0;
240
+ int j = 0;
241
+ while(i < xlen && j < ylen)
242
+ {
243
+ if(x[i].index == y[j].index)
244
+ {
245
+ double d = x[i++].value - y[j++].value;
246
+ sum += d*d;
247
+ }
248
+ else if(x[i].index > y[j].index)
249
+ {
250
+ sum += y[j].value * y[j].value;
251
+ ++j;
252
+ }
253
+ else
254
+ {
255
+ sum += x[i].value * x[i].value;
256
+ ++i;
257
+ }
258
+ }
259
+
260
+ while(i < xlen)
261
+ {
262
+ sum += x[i].value * x[i].value;
263
+ ++i;
264
+ }
265
+
266
+ while(j < ylen)
267
+ {
268
+ sum += y[j].value * y[j].value;
269
+ ++j;
270
+ }
271
+
272
+ return Math.exp(-param.gamma*sum);
273
+ }
274
+ case svm_parameter.SIGMOID:
275
+ return Math.tanh(param.gamma*dot(x,y)+param.coef0);
276
+ case svm_parameter.PRECOMPUTED:
277
+ return x[(int)(y[0].value)].value;
278
+ default:
279
+ return 0; // java
280
+ }
281
+ }
282
+ }
283
+
284
+ // An SMO algorithm in Fan et al., JMLR 6(2005), p. 1889--1918
285
+ // Solves:
286
+ //
287
+ // min 0.5(\alpha^T Q \alpha) + p^T \alpha
288
+ //
289
+ // y^T \alpha = \delta
290
+ // y_i = +1 or -1
291
+ // 0 <= alpha_i <= Cp for y_i = 1
292
+ // 0 <= alpha_i <= Cn for y_i = -1
293
+ //
294
+ // Given:
295
+ //
296
+ // Q, p, y, Cp, Cn, and an initial feasible point \alpha
297
+ // l is the size of vectors and matrices
298
+ // eps is the stopping tolerance
299
+ //
300
+ // solution will be put in \alpha, objective value will be put in obj
301
+ //
302
+ class Solver {
303
+ int active_size;
304
+ byte[] y;
305
+ double[] G; // gradient of objective function
306
+ static final byte LOWER_BOUND = 0;
307
+ static final byte UPPER_BOUND = 1;
308
+ static final byte FREE = 2;
309
+ byte[] alpha_status; // LOWER_BOUND, UPPER_BOUND, FREE
310
+ double[] alpha;
311
+ QMatrix Q;
312
+ double[] QD;
313
+ double eps;
314
+ double Cp,Cn;
315
+ double[] p;
316
+ int[] active_set;
317
+ double[] G_bar; // gradient, if we treat free variables as 0
318
+ int l;
319
+ boolean unshrink; // XXX
320
+
321
+ static final double INF = java.lang.Double.POSITIVE_INFINITY;
322
+
323
+ double get_C(int i)
324
+ {
325
+ return (y[i] > 0)? Cp : Cn;
326
+ }
327
+ void update_alpha_status(int i)
328
+ {
329
+ if(alpha[i] >= get_C(i))
330
+ alpha_status[i] = UPPER_BOUND;
331
+ else if(alpha[i] <= 0)
332
+ alpha_status[i] = LOWER_BOUND;
333
+ else alpha_status[i] = FREE;
334
+ }
335
+ boolean is_upper_bound(int i) { return alpha_status[i] == UPPER_BOUND; }
336
+ boolean is_lower_bound(int i) { return alpha_status[i] == LOWER_BOUND; }
337
+ boolean is_free(int i) { return alpha_status[i] == FREE; }
338
+
339
+ // java: information about solution except alpha,
340
+ // because we cannot return multiple values otherwise...
341
+ static class SolutionInfo {
342
+ double obj;
343
+ double rho;
344
+ double upper_bound_p;
345
+ double upper_bound_n;
346
+ double r; // for Solver_NU
347
+ }
348
+
349
+ void swap_index(int i, int j)
350
+ {
351
+ Q.swap_index(i,j);
352
+ do {byte _=y[i]; y[i]=y[j]; y[j]=_;} while(false);
353
+ do {double _=G[i]; G[i]=G[j]; G[j]=_;} while(false);
354
+ do {byte _=alpha_status[i]; alpha_status[i]=alpha_status[j]; alpha_status[j]=_;} while(false);
355
+ do {double _=alpha[i]; alpha[i]=alpha[j]; alpha[j]=_;} while(false);
356
+ do {double _=p[i]; p[i]=p[j]; p[j]=_;} while(false);
357
+ do {int _=active_set[i]; active_set[i]=active_set[j]; active_set[j]=_;} while(false);
358
+ do {double _=G_bar[i]; G_bar[i]=G_bar[j]; G_bar[j]=_;} while(false);
359
+ }
360
+
361
+ void reconstruct_gradient()
362
+ {
363
+ // reconstruct inactive elements of G from G_bar and free variables
364
+
365
+ if(active_size == l) return;
366
+
367
+ int i,j;
368
+ int nr_free = 0;
369
+
370
+ for(j=active_size;j<l;j++)
371
+ G[j] = G_bar[j] + p[j];
372
+
373
+ for(j=0;j<active_size;j++)
374
+ if(is_free(j))
375
+ nr_free++;
376
+
377
+ if(2*nr_free < active_size)
378
+ svm.info("\nWarning: using -h 0 may be faster\n");
379
+
380
+ if (nr_free*l > 2*active_size*(l-active_size))
381
+ {
382
+ for(i=active_size;i<l;i++)
383
+ {
384
+ float[] Q_i = Q.get_Q(i,active_size);
385
+ for(j=0;j<active_size;j++)
386
+ if(is_free(j))
387
+ G[i] += alpha[j] * Q_i[j];
388
+ }
389
+ }
390
+ else
391
+ {
392
+ for(i=0;i<active_size;i++)
393
+ if(is_free(i))
394
+ {
395
+ float[] Q_i = Q.get_Q(i,l);
396
+ double alpha_i = alpha[i];
397
+ for(j=active_size;j<l;j++)
398
+ G[j] += alpha_i * Q_i[j];
399
+ }
400
+ }
401
+ }
402
+
403
+ void Solve(int l, QMatrix Q, double[] p_, byte[] y_,
404
+ double[] alpha_, double Cp, double Cn, double eps, SolutionInfo si, int shrinking)
405
+ {
406
+ this.l = l;
407
+ this.Q = Q;
408
+ QD = Q.get_QD();
409
+ p = (double[])p_.clone();
410
+ y = (byte[])y_.clone();
411
+ alpha = (double[])alpha_.clone();
412
+ this.Cp = Cp;
413
+ this.Cn = Cn;
414
+ this.eps = eps;
415
+ this.unshrink = false;
416
+
417
+ // initialize alpha_status
418
+ {
419
+ alpha_status = new byte[l];
420
+ for(int i=0;i<l;i++)
421
+ update_alpha_status(i);
422
+ }
423
+
424
+ // initialize active set (for shrinking)
425
+ {
426
+ active_set = new int[l];
427
+ for(int i=0;i<l;i++)
428
+ active_set[i] = i;
429
+ active_size = l;
430
+ }
431
+
432
+ // initialize gradient
433
+ {
434
+ G = new double[l];
435
+ G_bar = new double[l];
436
+ int i;
437
+ for(i=0;i<l;i++)
438
+ {
439
+ G[i] = p[i];
440
+ G_bar[i] = 0;
441
+ }
442
+ for(i=0;i<l;i++)
443
+ if(!is_lower_bound(i))
444
+ {
445
+ float[] Q_i = Q.get_Q(i,l);
446
+ double alpha_i = alpha[i];
447
+ int j;
448
+ for(j=0;j<l;j++)
449
+ G[j] += alpha_i*Q_i[j];
450
+ if(is_upper_bound(i))
451
+ for(j=0;j<l;j++)
452
+ G_bar[j] += get_C(i) * Q_i[j];
453
+ }
454
+ }
455
+
456
+ // optimization step
457
+
458
+ int iter = 0;
459
+ int counter = Math.min(l,1000)+1;
460
+ int[] working_set = new int[2];
461
+
462
+ while(true)
463
+ {
464
+ // show progress and do shrinking
465
+
466
+ if(--counter == 0)
467
+ {
468
+ counter = Math.min(l,1000);
469
+ if(shrinking!=0) do_shrinking();
470
+ svm.info(".");
471
+ }
472
+
473
+ if(select_working_set(working_set)!=0)
474
+ {
475
+ // reconstruct the whole gradient
476
+ reconstruct_gradient();
477
+ // reset active set size and check
478
+ active_size = l;
479
+ svm.info("*");
480
+ if(select_working_set(working_set)!=0)
481
+ break;
482
+ else
483
+ counter = 1; // do shrinking next iteration
484
+ }
485
+
486
+ int i = working_set[0];
487
+ int j = working_set[1];
488
+
489
+ ++iter;
490
+
491
+ // update alpha[i] and alpha[j], handle bounds carefully
492
+
493
+ float[] Q_i = Q.get_Q(i,active_size);
494
+ float[] Q_j = Q.get_Q(j,active_size);
495
+
496
+ double C_i = get_C(i);
497
+ double C_j = get_C(j);
498
+
499
+ double old_alpha_i = alpha[i];
500
+ double old_alpha_j = alpha[j];
501
+
502
+ if(y[i]!=y[j])
503
+ {
504
+ double quad_coef = QD[i]+QD[j]+2*Q_i[j];
505
+ if (quad_coef <= 0)
506
+ quad_coef = 1e-12;
507
+ double delta = (-G[i]-G[j])/quad_coef;
508
+ double diff = alpha[i] - alpha[j];
509
+ alpha[i] += delta;
510
+ alpha[j] += delta;
511
+
512
+ if(diff > 0)
513
+ {
514
+ if(alpha[j] < 0)
515
+ {
516
+ alpha[j] = 0;
517
+ alpha[i] = diff;
518
+ }
519
+ }
520
+ else
521
+ {
522
+ if(alpha[i] < 0)
523
+ {
524
+ alpha[i] = 0;
525
+ alpha[j] = -diff;
526
+ }
527
+ }
528
+ if(diff > C_i - C_j)
529
+ {
530
+ if(alpha[i] > C_i)
531
+ {
532
+ alpha[i] = C_i;
533
+ alpha[j] = C_i - diff;
534
+ }
535
+ }
536
+ else
537
+ {
538
+ if(alpha[j] > C_j)
539
+ {
540
+ alpha[j] = C_j;
541
+ alpha[i] = C_j + diff;
542
+ }
543
+ }
544
+ }
545
+ else
546
+ {
547
+ double quad_coef = QD[i]+QD[j]-2*Q_i[j];
548
+ if (quad_coef <= 0)
549
+ quad_coef = 1e-12;
550
+ double delta = (G[i]-G[j])/quad_coef;
551
+ double sum = alpha[i] + alpha[j];
552
+ alpha[i] -= delta;
553
+ alpha[j] += delta;
554
+
555
+ if(sum > C_i)
556
+ {
557
+ if(alpha[i] > C_i)
558
+ {
559
+ alpha[i] = C_i;
560
+ alpha[j] = sum - C_i;
561
+ }
562
+ }
563
+ else
564
+ {
565
+ if(alpha[j] < 0)
566
+ {
567
+ alpha[j] = 0;
568
+ alpha[i] = sum;
569
+ }
570
+ }
571
+ if(sum > C_j)
572
+ {
573
+ if(alpha[j] > C_j)
574
+ {
575
+ alpha[j] = C_j;
576
+ alpha[i] = sum - C_j;
577
+ }
578
+ }
579
+ else
580
+ {
581
+ if(alpha[i] < 0)
582
+ {
583
+ alpha[i] = 0;
584
+ alpha[j] = sum;
585
+ }
586
+ }
587
+ }
588
+
589
+ // update G
590
+
591
+ double delta_alpha_i = alpha[i] - old_alpha_i;
592
+ double delta_alpha_j = alpha[j] - old_alpha_j;
593
+
594
+ for(int k=0;k<active_size;k++)
595
+ {
596
+ G[k] += Q_i[k]*delta_alpha_i + Q_j[k]*delta_alpha_j;
597
+ }
598
+
599
+ // update alpha_status and G_bar
600
+
601
+ {
602
+ boolean ui = is_upper_bound(i);
603
+ boolean uj = is_upper_bound(j);
604
+ update_alpha_status(i);
605
+ update_alpha_status(j);
606
+ int k;
607
+ if(ui != is_upper_bound(i))
608
+ {
609
+ Q_i = Q.get_Q(i,l);
610
+ if(ui)
611
+ for(k=0;k<l;k++)
612
+ G_bar[k] -= C_i * Q_i[k];
613
+ else
614
+ for(k=0;k<l;k++)
615
+ G_bar[k] += C_i * Q_i[k];
616
+ }
617
+
618
+ if(uj != is_upper_bound(j))
619
+ {
620
+ Q_j = Q.get_Q(j,l);
621
+ if(uj)
622
+ for(k=0;k<l;k++)
623
+ G_bar[k] -= C_j * Q_j[k];
624
+ else
625
+ for(k=0;k<l;k++)
626
+ G_bar[k] += C_j * Q_j[k];
627
+ }
628
+ }
629
+
630
+ }
631
+
632
+ // calculate rho
633
+
634
+ si.rho = calculate_rho();
635
+
636
+ // calculate objective value
637
+ {
638
+ double v = 0;
639
+ int i;
640
+ for(i=0;i<l;i++)
641
+ v += alpha[i] * (G[i] + p[i]);
642
+
643
+ si.obj = v/2;
644
+ }
645
+
646
+ // put back the solution
647
+ {
648
+ for(int i=0;i<l;i++)
649
+ alpha_[active_set[i]] = alpha[i];
650
+ }
651
+
652
+ si.upper_bound_p = Cp;
653
+ si.upper_bound_n = Cn;
654
+
655
+ svm.info("\noptimization finished, #iter = "+iter+"\n");
656
+ }
657
+
658
+ // return 1 if already optimal, return 0 otherwise
659
+ int select_working_set(int[] working_set)
660
+ {
661
+ // return i,j such that
662
+ // i: maximizes -y_i * grad(f)_i, i in I_up(\alpha)
663
+ // j: mimimizes the decrease of obj value
664
+ // (if quadratic coefficeint <= 0, replace it with tau)
665
+ // -y_j*grad(f)_j < -y_i*grad(f)_i, j in I_low(\alpha)
666
+
667
+ double Gmax = -INF;
668
+ double Gmax2 = -INF;
669
+ int Gmax_idx = -1;
670
+ int Gmin_idx = -1;
671
+ double obj_diff_min = INF;
672
+
673
+ for(int t=0;t<active_size;t++)
674
+ if(y[t]==+1)
675
+ {
676
+ if(!is_upper_bound(t))
677
+ if(-G[t] >= Gmax)
678
+ {
679
+ Gmax = -G[t];
680
+ Gmax_idx = t;
681
+ }
682
+ }
683
+ else
684
+ {
685
+ if(!is_lower_bound(t))
686
+ if(G[t] >= Gmax)
687
+ {
688
+ Gmax = G[t];
689
+ Gmax_idx = t;
690
+ }
691
+ }
692
+
693
+ int i = Gmax_idx;
694
+ float[] Q_i = null;
695
+ if(i != -1) // null Q_i not accessed: Gmax=-INF if i=-1
696
+ Q_i = Q.get_Q(i,active_size);
697
+
698
+ for(int j=0;j<active_size;j++)
699
+ {
700
+ if(y[j]==+1)
701
+ {
702
+ if (!is_lower_bound(j))
703
+ {
704
+ double grad_diff=Gmax+G[j];
705
+ if (G[j] >= Gmax2)
706
+ Gmax2 = G[j];
707
+ if (grad_diff > 0)
708
+ {
709
+ double obj_diff;
710
+ double quad_coef = QD[i]+QD[j]-2.0*y[i]*Q_i[j];
711
+ if (quad_coef > 0)
712
+ obj_diff = -(grad_diff*grad_diff)/quad_coef;
713
+ else
714
+ obj_diff = -(grad_diff*grad_diff)/1e-12;
715
+
716
+ if (obj_diff <= obj_diff_min)
717
+ {
718
+ Gmin_idx=j;
719
+ obj_diff_min = obj_diff;
720
+ }
721
+ }
722
+ }
723
+ }
724
+ else
725
+ {
726
+ if (!is_upper_bound(j))
727
+ {
728
+ double grad_diff= Gmax-G[j];
729
+ if (-G[j] >= Gmax2)
730
+ Gmax2 = -G[j];
731
+ if (grad_diff > 0)
732
+ {
733
+ double obj_diff;
734
+ double quad_coef = QD[i]+QD[j]+2.0*y[i]*Q_i[j];
735
+ if (quad_coef > 0)
736
+ obj_diff = -(grad_diff*grad_diff)/quad_coef;
737
+ else
738
+ obj_diff = -(grad_diff*grad_diff)/1e-12;
739
+
740
+ if (obj_diff <= obj_diff_min)
741
+ {
742
+ Gmin_idx=j;
743
+ obj_diff_min = obj_diff;
744
+ }
745
+ }
746
+ }
747
+ }
748
+ }
749
+
750
+ if(Gmax+Gmax2 < eps)
751
+ return 1;
752
+
753
+ working_set[0] = Gmax_idx;
754
+ working_set[1] = Gmin_idx;
755
+ return 0;
756
+ }
757
+
758
+ private boolean be_shrunk(int i, double Gmax1, double Gmax2)
759
+ {
760
+ if(is_upper_bound(i))
761
+ {
762
+ if(y[i]==+1)
763
+ return(-G[i] > Gmax1);
764
+ else
765
+ return(-G[i] > Gmax2);
766
+ }
767
+ else if(is_lower_bound(i))
768
+ {
769
+ if(y[i]==+1)
770
+ return(G[i] > Gmax2);
771
+ else
772
+ return(G[i] > Gmax1);
773
+ }
774
+ else
775
+ return(false);
776
+ }
777
+
778
+ void do_shrinking()
779
+ {
780
+ int i;
781
+ double Gmax1 = -INF; // max { -y_i * grad(f)_i | i in I_up(\alpha) }
782
+ double Gmax2 = -INF; // max { y_i * grad(f)_i | i in I_low(\alpha) }
783
+
784
+ // find maximal violating pair first
785
+ for(i=0;i<active_size;i++)
786
+ {
787
+ if(y[i]==+1)
788
+ {
789
+ if(!is_upper_bound(i))
790
+ {
791
+ if(-G[i] >= Gmax1)
792
+ Gmax1 = -G[i];
793
+ }
794
+ if(!is_lower_bound(i))
795
+ {
796
+ if(G[i] >= Gmax2)
797
+ Gmax2 = G[i];
798
+ }
799
+ }
800
+ else
801
+ {
802
+ if(!is_upper_bound(i))
803
+ {
804
+ if(-G[i] >= Gmax2)
805
+ Gmax2 = -G[i];
806
+ }
807
+ if(!is_lower_bound(i))
808
+ {
809
+ if(G[i] >= Gmax1)
810
+ Gmax1 = G[i];
811
+ }
812
+ }
813
+ }
814
+
815
+ if(unshrink == false && Gmax1 + Gmax2 <= eps*10)
816
+ {
817
+ unshrink = true;
818
+ reconstruct_gradient();
819
+ active_size = l;
820
+ }
821
+
822
+ for(i=0;i<active_size;i++)
823
+ if (be_shrunk(i, Gmax1, Gmax2))
824
+ {
825
+ active_size--;
826
+ while (active_size > i)
827
+ {
828
+ if (!be_shrunk(active_size, Gmax1, Gmax2))
829
+ {
830
+ swap_index(i,active_size);
831
+ break;
832
+ }
833
+ active_size--;
834
+ }
835
+ }
836
+ }
837
+
838
+ double calculate_rho()
839
+ {
840
+ double r;
841
+ int nr_free = 0;
842
+ double ub = INF, lb = -INF, sum_free = 0;
843
+ for(int i=0;i<active_size;i++)
844
+ {
845
+ double yG = y[i]*G[i];
846
+
847
+ if(is_lower_bound(i))
848
+ {
849
+ if(y[i] > 0)
850
+ ub = Math.min(ub,yG);
851
+ else
852
+ lb = Math.max(lb,yG);
853
+ }
854
+ else if(is_upper_bound(i))
855
+ {
856
+ if(y[i] < 0)
857
+ ub = Math.min(ub,yG);
858
+ else
859
+ lb = Math.max(lb,yG);
860
+ }
861
+ else
862
+ {
863
+ ++nr_free;
864
+ sum_free += yG;
865
+ }
866
+ }
867
+
868
+ if(nr_free>0)
869
+ r = sum_free/nr_free;
870
+ else
871
+ r = (ub+lb)/2;
872
+
873
+ return r;
874
+ }
875
+
876
+ }
877
+
878
+ //
879
+ // Solver for nu-svm classification and regression
880
+ //
881
+ // additional constraint: e^T \alpha = constant
882
+ //
883
+ final class Solver_NU extends Solver
884
+ {
885
+ private SolutionInfo si;
886
+
887
+ void Solve(int l, QMatrix Q, double[] p, byte[] y,
888
+ double[] alpha, double Cp, double Cn, double eps,
889
+ SolutionInfo si, int shrinking)
890
+ {
891
+ this.si = si;
892
+ super.Solve(l,Q,p,y,alpha,Cp,Cn,eps,si,shrinking);
893
+ }
894
+
895
+ // return 1 if already optimal, return 0 otherwise
896
+ int select_working_set(int[] working_set)
897
+ {
898
+ // return i,j such that y_i = y_j and
899
+ // i: maximizes -y_i * grad(f)_i, i in I_up(\alpha)
900
+ // j: minimizes the decrease of obj value
901
+ // (if quadratic coefficeint <= 0, replace it with tau)
902
+ // -y_j*grad(f)_j < -y_i*grad(f)_i, j in I_low(\alpha)
903
+
904
+ double Gmaxp = -INF;
905
+ double Gmaxp2 = -INF;
906
+ int Gmaxp_idx = -1;
907
+
908
+ double Gmaxn = -INF;
909
+ double Gmaxn2 = -INF;
910
+ int Gmaxn_idx = -1;
911
+
912
+ int Gmin_idx = -1;
913
+ double obj_diff_min = INF;
914
+
915
+ for(int t=0;t<active_size;t++)
916
+ if(y[t]==+1)
917
+ {
918
+ if(!is_upper_bound(t))
919
+ if(-G[t] >= Gmaxp)
920
+ {
921
+ Gmaxp = -G[t];
922
+ Gmaxp_idx = t;
923
+ }
924
+ }
925
+ else
926
+ {
927
+ if(!is_lower_bound(t))
928
+ if(G[t] >= Gmaxn)
929
+ {
930
+ Gmaxn = G[t];
931
+ Gmaxn_idx = t;
932
+ }
933
+ }
934
+
935
+ int ip = Gmaxp_idx;
936
+ int in = Gmaxn_idx;
937
+ float[] Q_ip = null;
938
+ float[] Q_in = null;
939
+ if(ip != -1) // null Q_ip not accessed: Gmaxp=-INF if ip=-1
940
+ Q_ip = Q.get_Q(ip,active_size);
941
+ if(in != -1)
942
+ Q_in = Q.get_Q(in,active_size);
943
+
944
+ for(int j=0;j<active_size;j++)
945
+ {
946
+ if(y[j]==+1)
947
+ {
948
+ if (!is_lower_bound(j))
949
+ {
950
+ double grad_diff=Gmaxp+G[j];
951
+ if (G[j] >= Gmaxp2)
952
+ Gmaxp2 = G[j];
953
+ if (grad_diff > 0)
954
+ {
955
+ double obj_diff;
956
+ double quad_coef = QD[ip]+QD[j]-2*Q_ip[j];
957
+ if (quad_coef > 0)
958
+ obj_diff = -(grad_diff*grad_diff)/quad_coef;
959
+ else
960
+ obj_diff = -(grad_diff*grad_diff)/1e-12;
961
+
962
+ if (obj_diff <= obj_diff_min)
963
+ {
964
+ Gmin_idx=j;
965
+ obj_diff_min = obj_diff;
966
+ }
967
+ }
968
+ }
969
+ }
970
+ else
971
+ {
972
+ if (!is_upper_bound(j))
973
+ {
974
+ double grad_diff=Gmaxn-G[j];
975
+ if (-G[j] >= Gmaxn2)
976
+ Gmaxn2 = -G[j];
977
+ if (grad_diff > 0)
978
+ {
979
+ double obj_diff;
980
+ double quad_coef = QD[in]+QD[j]-2*Q_in[j];
981
+ if (quad_coef > 0)
982
+ obj_diff = -(grad_diff*grad_diff)/quad_coef;
983
+ else
984
+ obj_diff = -(grad_diff*grad_diff)/1e-12;
985
+
986
+ if (obj_diff <= obj_diff_min)
987
+ {
988
+ Gmin_idx=j;
989
+ obj_diff_min = obj_diff;
990
+ }
991
+ }
992
+ }
993
+ }
994
+ }
995
+
996
+ if(Math.max(Gmaxp+Gmaxp2,Gmaxn+Gmaxn2) < eps)
997
+ return 1;
998
+
999
+ if(y[Gmin_idx] == +1)
1000
+ working_set[0] = Gmaxp_idx;
1001
+ else
1002
+ working_set[0] = Gmaxn_idx;
1003
+ working_set[1] = Gmin_idx;
1004
+
1005
+ return 0;
1006
+ }
1007
+
1008
+ private boolean be_shrunk(int i, double Gmax1, double Gmax2, double Gmax3, double Gmax4)
1009
+ {
1010
+ if(is_upper_bound(i))
1011
+ {
1012
+ if(y[i]==+1)
1013
+ return(-G[i] > Gmax1);
1014
+ else
1015
+ return(-G[i] > Gmax4);
1016
+ }
1017
+ else if(is_lower_bound(i))
1018
+ {
1019
+ if(y[i]==+1)
1020
+ return(G[i] > Gmax2);
1021
+ else
1022
+ return(G[i] > Gmax3);
1023
+ }
1024
+ else
1025
+ return(false);
1026
+ }
1027
+
1028
+ void do_shrinking()
1029
+ {
1030
+ double Gmax1 = -INF; // max { -y_i * grad(f)_i | y_i = +1, i in I_up(\alpha) }
1031
+ double Gmax2 = -INF; // max { y_i * grad(f)_i | y_i = +1, i in I_low(\alpha) }
1032
+ double Gmax3 = -INF; // max { -y_i * grad(f)_i | y_i = -1, i in I_up(\alpha) }
1033
+ double Gmax4 = -INF; // max { y_i * grad(f)_i | y_i = -1, i in I_low(\alpha) }
1034
+
1035
+ // find maximal violating pair first
1036
+ int i;
1037
+ for(i=0;i<active_size;i++)
1038
+ {
1039
+ if(!is_upper_bound(i))
1040
+ {
1041
+ if(y[i]==+1)
1042
+ {
1043
+ if(-G[i] > Gmax1) Gmax1 = -G[i];
1044
+ }
1045
+ else if(-G[i] > Gmax4) Gmax4 = -G[i];
1046
+ }
1047
+ if(!is_lower_bound(i))
1048
+ {
1049
+ if(y[i]==+1)
1050
+ {
1051
+ if(G[i] > Gmax2) Gmax2 = G[i];
1052
+ }
1053
+ else if(G[i] > Gmax3) Gmax3 = G[i];
1054
+ }
1055
+ }
1056
+
1057
+ if(unshrink == false && Math.max(Gmax1+Gmax2,Gmax3+Gmax4) <= eps*10)
1058
+ {
1059
+ unshrink = true;
1060
+ reconstruct_gradient();
1061
+ active_size = l;
1062
+ }
1063
+
1064
+ for(i=0;i<active_size;i++)
1065
+ if (be_shrunk(i, Gmax1, Gmax2, Gmax3, Gmax4))
1066
+ {
1067
+ active_size--;
1068
+ while (active_size > i)
1069
+ {
1070
+ if (!be_shrunk(active_size, Gmax1, Gmax2, Gmax3, Gmax4))
1071
+ {
1072
+ swap_index(i,active_size);
1073
+ break;
1074
+ }
1075
+ active_size--;
1076
+ }
1077
+ }
1078
+ }
1079
+
1080
+ double calculate_rho()
1081
+ {
1082
+ int nr_free1 = 0,nr_free2 = 0;
1083
+ double ub1 = INF, ub2 = INF;
1084
+ double lb1 = -INF, lb2 = -INF;
1085
+ double sum_free1 = 0, sum_free2 = 0;
1086
+
1087
+ for(int i=0;i<active_size;i++)
1088
+ {
1089
+ if(y[i]==+1)
1090
+ {
1091
+ if(is_lower_bound(i))
1092
+ ub1 = Math.min(ub1,G[i]);
1093
+ else if(is_upper_bound(i))
1094
+ lb1 = Math.max(lb1,G[i]);
1095
+ else
1096
+ {
1097
+ ++nr_free1;
1098
+ sum_free1 += G[i];
1099
+ }
1100
+ }
1101
+ else
1102
+ {
1103
+ if(is_lower_bound(i))
1104
+ ub2 = Math.min(ub2,G[i]);
1105
+ else if(is_upper_bound(i))
1106
+ lb2 = Math.max(lb2,G[i]);
1107
+ else
1108
+ {
1109
+ ++nr_free2;
1110
+ sum_free2 += G[i];
1111
+ }
1112
+ }
1113
+ }
1114
+
1115
+ double r1,r2;
1116
+ if(nr_free1 > 0)
1117
+ r1 = sum_free1/nr_free1;
1118
+ else
1119
+ r1 = (ub1+lb1)/2;
1120
+
1121
+ if(nr_free2 > 0)
1122
+ r2 = sum_free2/nr_free2;
1123
+ else
1124
+ r2 = (ub2+lb2)/2;
1125
+
1126
+ si.r = (r1+r2)/2;
1127
+ return (r1-r2)/2;
1128
+ }
1129
+ }
1130
+
1131
+ //
1132
+ // Q matrices for various formulations
1133
+ //
1134
+ class SVC_Q extends Kernel
1135
+ {
1136
+ private final byte[] y;
1137
+ private final Cache cache;
1138
+ private final double[] QD;
1139
+
1140
+ SVC_Q(svm_problem prob, svm_parameter param, byte[] y_)
1141
+ {
1142
+ super(prob.l, prob.x, param);
1143
+ y = (byte[])y_.clone();
1144
+ cache = new Cache(prob.l,(long)(param.cache_size*(1<<20)));
1145
+ QD = new double[prob.l];
1146
+ for(int i=0;i<prob.l;i++)
1147
+ QD[i] = kernel_function(i,i);
1148
+ }
1149
+
1150
+ float[] get_Q(int i, int len)
1151
+ {
1152
+ float[][] data = new float[1][];
1153
+ int start, j;
1154
+ if((start = cache.get_data(i,data,len)) < len)
1155
+ {
1156
+ for(j=start;j<len;j++)
1157
+ data[0][j] = (float)(y[i]*y[j]*kernel_function(i,j));
1158
+ }
1159
+ return data[0];
1160
+ }
1161
+
1162
+ double[] get_QD()
1163
+ {
1164
+ return QD;
1165
+ }
1166
+
1167
+ void swap_index(int i, int j)
1168
+ {
1169
+ cache.swap_index(i,j);
1170
+ super.swap_index(i,j);
1171
+ do {byte _=y[i]; y[i]=y[j]; y[j]=_;} while(false);
1172
+ do {double _=QD[i]; QD[i]=QD[j]; QD[j]=_;} while(false);
1173
+ }
1174
+ }
1175
+
1176
+ class ONE_CLASS_Q extends Kernel
1177
+ {
1178
+ private final Cache cache;
1179
+ private final double[] QD;
1180
+
1181
+ ONE_CLASS_Q(svm_problem prob, svm_parameter param)
1182
+ {
1183
+ super(prob.l, prob.x, param);
1184
+ cache = new Cache(prob.l,(long)(param.cache_size*(1<<20)));
1185
+ QD = new double[prob.l];
1186
+ for(int i=0;i<prob.l;i++)
1187
+ QD[i] = kernel_function(i,i);
1188
+ }
1189
+
1190
+ float[] get_Q(int i, int len)
1191
+ {
1192
+ float[][] data = new float[1][];
1193
+ int start, j;
1194
+ if((start = cache.get_data(i,data,len)) < len)
1195
+ {
1196
+ for(j=start;j<len;j++)
1197
+ data[0][j] = (float)kernel_function(i,j);
1198
+ }
1199
+ return data[0];
1200
+ }
1201
+
1202
+ double[] get_QD()
1203
+ {
1204
+ return QD;
1205
+ }
1206
+
1207
+ void swap_index(int i, int j)
1208
+ {
1209
+ cache.swap_index(i,j);
1210
+ super.swap_index(i,j);
1211
+ do {double _=QD[i]; QD[i]=QD[j]; QD[j]=_;} while(false);
1212
+ }
1213
+ }
1214
+
1215
+ class SVR_Q extends Kernel
1216
+ {
1217
+ private final int l;
1218
+ private final Cache cache;
1219
+ private final byte[] sign;
1220
+ private final int[] index;
1221
+ private int next_buffer;
1222
+ private float[][] buffer;
1223
+ private final double[] QD;
1224
+
1225
+ SVR_Q(svm_problem prob, svm_parameter param)
1226
+ {
1227
+ super(prob.l, prob.x, param);
1228
+ l = prob.l;
1229
+ cache = new Cache(l,(long)(param.cache_size*(1<<20)));
1230
+ QD = new double[2*l];
1231
+ sign = new byte[2*l];
1232
+ index = new int[2*l];
1233
+ for(int k=0;k<l;k++)
1234
+ {
1235
+ sign[k] = 1;
1236
+ sign[k+l] = -1;
1237
+ index[k] = k;
1238
+ index[k+l] = k;
1239
+ QD[k] = kernel_function(k,k);
1240
+ QD[k+l] = QD[k];
1241
+ }
1242
+ buffer = new float[2][2*l];
1243
+ next_buffer = 0;
1244
+ }
1245
+
1246
+ void swap_index(int i, int j)
1247
+ {
1248
+ do {byte _=sign[i]; sign[i]=sign[j]; sign[j]=_;} while(false);
1249
+ do {int _=index[i]; index[i]=index[j]; index[j]=_;} while(false);
1250
+ do {double _=QD[i]; QD[i]=QD[j]; QD[j]=_;} while(false);
1251
+ }
1252
+
1253
+ float[] get_Q(int i, int len)
1254
+ {
1255
+ float[][] data = new float[1][];
1256
+ int j, real_i = index[i];
1257
+ if(cache.get_data(real_i,data,l) < l)
1258
+ {
1259
+ for(j=0;j<l;j++)
1260
+ data[0][j] = (float)kernel_function(real_i,j);
1261
+ }
1262
+
1263
+ // reorder and copy
1264
+ float buf[] = buffer[next_buffer];
1265
+ next_buffer = 1 - next_buffer;
1266
+ byte si = sign[i];
1267
+ for(j=0;j<len;j++)
1268
+ buf[j] = (float) si * sign[j] * data[0][index[j]];
1269
+ return buf;
1270
+ }
1271
+
1272
+ double[] get_QD()
1273
+ {
1274
+ return QD;
1275
+ }
1276
+ }
1277
+
1278
+ public class svm {
1279
+ //
1280
+ // construct and solve various formulations
1281
+ //
1282
+ public static final int LIBSVM_VERSION=300;
1283
+
1284
+ private static svm_print_interface svm_print_stdout = new svm_print_interface()
1285
+ {
1286
+ public void print(String s)
1287
+ {
1288
+ System.out.print(s);
1289
+ System.out.flush();
1290
+ }
1291
+ };
1292
+
1293
+ private static svm_print_interface svm_print_string = svm_print_stdout;
1294
+
1295
+ static void info(String s)
1296
+ {
1297
+ svm_print_string.print(s);
1298
+ }
1299
+
1300
// Solve the C-SVC dual problem.
// On return, alpha holds the signed coefficients (alpha[i]*y[i]) and si
// the solver statistics.  Cp/Cn are the (possibly class-weighted)
// penalties for the positive and negative class.
private static void solve_c_svc(svm_problem prob, svm_parameter param,
				double[] alpha, Solver.SolutionInfo si,
				double Cp, double Cn)
{
	int l = prob.l;
	double[] minus_ones = new double[l];
	byte[] y = new byte[l];

	int i;

	for(i=0;i<l;i++)
	{
		alpha[i] = 0;
		minus_ones[i] = -1;	// linear term of the dual objective
		if(prob.y[i] > 0) y[i] = +1; else y[i] = -1;
	}

	Solver s = new Solver();
	s.Solve(l, new SVC_Q(prob,param,y), minus_ones, y,
		alpha, Cp, Cn, param.eps, si, param.shrinking);

	double sum_alpha=0;
	for(i=0;i<l;i++)
		sum_alpha += alpha[i];

	// Report the equivalent nu value (only meaningful when Cp==Cn).
	if (Cp==Cn)
		svm.info("nu = "+sum_alpha/(Cp*prob.l)+"\n");

	// Fold labels into alpha so callers get signed coefficients.
	for(i=0;i<l;i++)
		alpha[i] *= y[i];
}
1331
+
1332
// Solve the nu-SVC dual problem, then rescale the solution by 1/r so it
// takes the same form as a C-SVC solution.
private static void solve_nu_svc(svm_problem prob, svm_parameter param,
				double[] alpha, Solver.SolutionInfo si)
{
	int i;
	int l = prob.l;
	double nu = param.nu;

	byte[] y = new byte[l];

	for(i=0;i<l;i++)
		if(prob.y[i]>0)
			y[i] = +1;
		else
			y[i] = -1;

	// Initial feasible point: distribute nu*l/2 of mass over each class.
	double sum_pos = nu*l/2;
	double sum_neg = nu*l/2;

	for(i=0;i<l;i++)
		if(y[i] == +1)
		{
			alpha[i] = Math.min(1.0,sum_pos);
			sum_pos -= alpha[i];
		}
		else
		{
			alpha[i] = Math.min(1.0,sum_neg);
			sum_neg -= alpha[i];
		}

	double[] zeros = new double[l];

	for(i=0;i<l;i++)
		zeros[i] = 0;

	Solver_NU s = new Solver_NU();
	s.Solve(l, new SVC_Q(prob,param,y), zeros, y,
		alpha, 1.0, 1.0, param.eps, si, param.shrinking);
	double r = si.r;

	svm.info("C = "+1/r+"\n");

	// Rescale to the C-SVC form (equivalent C is 1/r).
	for(i=0;i<l;i++)
		alpha[i] *= y[i]/r;

	si.rho /= r;
	si.obj /= (r*r);
	si.upper_bound_p = 1/r;
	si.upper_bound_n = 1/r;
}
1382
+
1383
// Solve the one-class SVM dual problem.
private static void solve_one_class(svm_problem prob, svm_parameter param,
				double[] alpha, Solver.SolutionInfo si)
{
	int l = prob.l;
	double[] zeros = new double[l];
	byte[] ones = new byte[l];
	int i;

	int n = (int)(param.nu*prob.l);	// # of alpha's at upper bound

	for(i=0;i<n;i++)
		alpha[i] = 1;
	// The fractional remainder of nu*l goes to the next variable
	// (guarded so n==l cannot index out of bounds).
	if(n<prob.l)
		alpha[n] = param.nu * prob.l - n;
	for(i=n+1;i<l;i++)
		alpha[i] = 0;

	for(i=0;i<l;i++)
	{
		zeros[i] = 0;
		ones[i] = 1;
	}

	Solver s = new Solver();
	s.Solve(l, new ONE_CLASS_Q(prob,param), zeros, ones,
		alpha, 1.0, 1.0, param.eps, si, param.shrinking);
}
1410
+
1411
// Solve the epsilon-SVR dual: a 2l-variable problem where variables
// [0,l) and [l,2l) are the two sides of the epsilon tube; the pair is
// collapsed back to l signed coefficients on return.
private static void solve_epsilon_svr(svm_problem prob, svm_parameter param,
				double[] alpha, Solver.SolutionInfo si)
{
	int l = prob.l;
	double[] alpha2 = new double[2*l];
	double[] linear_term = new double[2*l];
	byte[] y = new byte[2*l];
	int i;

	for(i=0;i<l;i++)
	{
		alpha2[i] = 0;
		linear_term[i] = param.p - prob.y[i];
		y[i] = 1;

		alpha2[i+l] = 0;
		linear_term[i+l] = param.p + prob.y[i];
		y[i+l] = -1;
	}

	Solver s = new Solver();
	s.Solve(2*l, new SVR_Q(prob,param), linear_term, y,
		alpha2, param.C, param.C, param.eps, si, param.shrinking);

	double sum_alpha = 0;
	for(i=0;i<l;i++)
	{
		alpha[i] = alpha2[i] - alpha2[i+l];
		sum_alpha += Math.abs(alpha[i]);
	}
	svm.info("nu = "+sum_alpha/(param.C*l)+"\n");
}
1443
+
1444
// Solve the nu-SVR dual problem (2l paired variables, as in epsilon-SVR).
private static void solve_nu_svr(svm_problem prob, svm_parameter param,
				double[] alpha, Solver.SolutionInfo si)
{
	int l = prob.l;
	double C = param.C;
	double[] alpha2 = new double[2*l];
	double[] linear_term = new double[2*l];
	byte[] y = new byte[2*l];
	int i;

	// Initial feasible point: spread C*nu*l/2 over the paired variables.
	double sum = C * param.nu * l / 2;
	for(i=0;i<l;i++)
	{
		alpha2[i] = alpha2[i+l] = Math.min(sum,C);
		sum -= alpha2[i];

		linear_term[i] = - prob.y[i];
		y[i] = 1;

		linear_term[i+l] = prob.y[i];
		y[i+l] = -1;
	}

	Solver_NU s = new Solver_NU();
	s.Solve(2*l, new SVR_Q(prob,param), linear_term, y,
		alpha2, C, C, param.eps, si, param.shrinking);

	// si.r is the negated epsilon implied by nu.
	svm.info("epsilon = "+(-si.r)+"\n");

	for(i=0;i<l;i++)
		alpha[i] = alpha2[i] - alpha2[i+l];
}
1476
+
1477
+ //
1478
+ // decision_function
1479
+ //
1480
// Result of training one binary sub-problem: the signed support-vector
// coefficients together with the bias term rho.
static class decision_function
{
	double[] alpha;	// signed coefficients (alpha_i * y_i)
	double rho;	// bias term
};
1485
+
1486
// Dispatch to the solver for the configured svm_type, then log solver
// statistics and support-vector counts.  Cp/Cn are the per-class
// penalties (callers pass 0 for one-class/regression, which ignore them).
static decision_function svm_train_one(
	svm_problem prob, svm_parameter param,
	double Cp, double Cn)
{
	double[] alpha = new double[prob.l];
	Solver.SolutionInfo si = new Solver.SolutionInfo();
	switch(param.svm_type)
	{
		case svm_parameter.C_SVC:
			solve_c_svc(prob,param,alpha,si,Cp,Cn);
			break;
		case svm_parameter.NU_SVC:
			solve_nu_svc(prob,param,alpha,si);
			break;
		case svm_parameter.ONE_CLASS:
			solve_one_class(prob,param,alpha,si);
			break;
		case svm_parameter.EPSILON_SVR:
			solve_epsilon_svr(prob,param,alpha,si);
			break;
		case svm_parameter.NU_SVR:
			solve_nu_svr(prob,param,alpha,si);
			break;
	}

	svm.info("obj = "+si.obj+", rho = "+si.rho+"\n");

	// output SVs

	int nSV = 0;	// support vectors (alpha != 0)
	int nBSV = 0;	// bounded SVs (alpha at the upper bound)
	for(int i=0;i<prob.l;i++)
	{
		if(Math.abs(alpha[i]) > 0)
		{
			++nSV;
			if(prob.y[i] > 0)
			{
				if(Math.abs(alpha[i]) >= si.upper_bound_p)
					++nBSV;
			}
			else
			{
				if(Math.abs(alpha[i]) >= si.upper_bound_n)
					++nBSV;
			}
		}
	}

	svm.info("nSV = "+nSV+", nBSV = "+nBSV+"\n");

	decision_function f = new decision_function();
	f.alpha = alpha;
	f.rho = si.rho;
	return f;
}
1542
+
1543
+ // Platt's binary SVM Probablistic Output: an improvement from Lin et al.
1544
// Platt's binary SVM Probablistic Output: an improvement from Lin et al.
// Fits A, B of the sigmoid 1/(1+exp(A*f+B)) to (dec_values, labels) by
// Newton's method with backtracking line search; results go in
// probAB[0]=A, probAB[1]=B.
private static void sigmoid_train(int l, double[] dec_values, double[] labels,
			  double[] probAB)
{
	double A, B;
	double prior1=0, prior0 = 0;
	int i;

	for (i=0;i<l;i++)
		if (labels[i] > 0) prior1+=1;
		else prior0+=1;

	int max_iter=100;	// Maximal number of iterations
	double min_step=1e-10;	// Minimal step taken in line search
	double sigma=1e-12;	// For numerically strict PD of Hessian
	double eps=1e-5;
	// Regularized targets (Platt's prior correction).
	double hiTarget=(prior1+1.0)/(prior1+2.0);
	double loTarget=1/(prior0+2.0);
	double[] t= new double[l];
	double fApB,p,q,h11,h22,h21,g1,g2,det,dA,dB,gd,stepsize;
	double newA,newB,newf,d1,d2;
	int iter;

	// Initial Point and Initial Fun Value
	A=0.0; B=Math.log((prior0+1.0)/(prior1+1.0));
	double fval = 0.0;

	for (i=0;i<l;i++)
	{
		if (labels[i]>0) t[i]=hiTarget;
		else t[i]=loTarget;
		fApB = dec_values[i]*A+B;
		// Branch on the sign of fApB to avoid overflow in exp().
		if (fApB>=0)
			fval += t[i]*fApB + Math.log(1+Math.exp(-fApB));
		else
			fval += (t[i] - 1)*fApB +Math.log(1+Math.exp(fApB));
	}
	for (iter=0;iter<max_iter;iter++)
	{
		// Update Gradient and Hessian (use H' = H + sigma I)
		h11=sigma; // numerically ensures strict PD
		h22=sigma;
		h21=0.0;g1=0.0;g2=0.0;
		for (i=0;i<l;i++)
		{
			fApB = dec_values[i]*A+B;
			if (fApB >= 0)
			{
				p=Math.exp(-fApB)/(1.0+Math.exp(-fApB));
				q=1.0/(1.0+Math.exp(-fApB));
			}
			else
			{
				p=1.0/(1.0+Math.exp(fApB));
				q=Math.exp(fApB)/(1.0+Math.exp(fApB));
			}
			d2=p*q;
			h11+=dec_values[i]*dec_values[i]*d2;
			h22+=d2;
			h21+=dec_values[i]*d2;
			d1=t[i]-p;
			g1+=dec_values[i]*d1;
			g2+=d1;
		}

		// Stopping Criteria
		if (Math.abs(g1)<eps && Math.abs(g2)<eps)
			break;

		// Finding Newton direction: -inv(H') * g
		det=h11*h22-h21*h21;
		dA=-(h22*g1 - h21 * g2) / det;
		dB=-(-h21*g1+ h11 * g2) / det;
		gd=g1*dA+g2*dB;


		stepsize = 1;		// Line Search
		while (stepsize >= min_step)
		{
			newA = A + stepsize * dA;
			newB = B + stepsize * dB;

			// New function value
			newf = 0.0;
			for (i=0;i<l;i++)
			{
				fApB = dec_values[i]*newA+newB;
				if (fApB >= 0)
					newf += t[i]*fApB + Math.log(1+Math.exp(-fApB));
				else
					newf += (t[i] - 1)*fApB +Math.log(1+Math.exp(fApB));
			}
			// Check sufficient decrease
			if (newf<fval+0.0001*stepsize*gd)
			{
				A=newA;B=newB;fval=newf;
				break;
			}
			else
				stepsize = stepsize / 2.0;
		}

		if (stepsize < min_step)
		{
			svm.info("Line search fails in two-class probability estimates\n");
			break;
		}
	}

	if (iter>=max_iter)
		svm.info("Reaching maximal iterations in two-class probability estimates\n");
	probAB[0]=A;probAB[1]=B;
}
1656
+
1657
+ private static double sigmoid_predict(double decision_value, double A, double B)
1658
+ {
1659
+ double fApB = decision_value*A+B;
1660
+ if (fApB >= 0)
1661
+ return Math.exp(-fApB)/(1.0+Math.exp(-fApB));
1662
+ else
1663
+ return 1.0/(1+Math.exp(fApB)) ;
1664
+ }
1665
+
1666
+ // Method 2 from the multiclass_prob paper by Wu, Lin, and Weng
1667
// Method 2 from the multiclass_prob paper by Wu, Lin, and Weng:
// combine the pairwise probabilities r[i][j] into k class probabilities
// p by iterating a fixed-point update until the KKT residual is small.
private static void multiclass_probability(int k, double[][] r, double[] p)
{
	int t,j;
	int iter = 0, max_iter=Math.max(100,k);
	double[][] Q=new double[k][k];
	double[] Qp=new double[k];
	double pQp, eps=0.005/k;

	for (t=0;t<k;t++)
	{
		p[t]=1.0/k;  // Valid if k = 1
		Q[t][t]=0;
		for (j=0;j<t;j++)
		{
			Q[t][t]+=r[j][t]*r[j][t];
			Q[t][j]=Q[j][t];	// symmetric: already computed
		}
		for (j=t+1;j<k;j++)
		{
			Q[t][t]+=r[j][t]*r[j][t];
			Q[t][j]=-r[j][t]*r[t][j];
		}
	}
	for (iter=0;iter<max_iter;iter++)
	{
		// stopping condition, recalculate QP,pQP for numerical accuracy
		pQp=0;
		for (t=0;t<k;t++)
		{
			Qp[t]=0;
			for (j=0;j<k;j++)
				Qp[t]+=Q[t][j]*p[j];
			pQp+=p[t]*Qp[t];
		}
		double max_error=0;
		for (t=0;t<k;t++)
		{
			double error=Math.abs(Qp[t]-pQp);
			if (error>max_error)
				max_error=error;
		}
		if (max_error<eps) break;

		// Coordinate update of p[t], then incrementally renormalize.
		for (t=0;t<k;t++)
		{
			double diff=(-Qp[t]+pQp)/Q[t][t];
			p[t]+=diff;
			pQp=(pQp+diff*(diff*Q[t][t]+2*Qp[t]))/(1+diff)/(1+diff);
			for (j=0;j<k;j++)
			{
				Qp[j]=(Qp[j]+diff*Q[t][j])/(1+diff);
				p[j]/=(1+diff);
			}
		}
	}
	if (iter>=max_iter)
		svm.info("Exceeds max_iter in multiclass_prob\n");
}
1725
+
1726
+ // Cross-validation decision values for probability estimates
1727
+ private static void svm_binary_svc_probability(svm_problem prob, svm_parameter param, double Cp, double Cn, double[] probAB)
1728
+ {
1729
+ int i;
1730
+ int nr_fold = 5;
1731
+ int[] perm = new int[prob.l];
1732
+ double[] dec_values = new double[prob.l];
1733
+
1734
+ // random shuffle
1735
+ for(i=0;i<prob.l;i++) perm[i]=i;
1736
+ for(i=0;i<prob.l;i++)
1737
+ {
1738
+ int j = i+(int)(Math.random()*(prob.l-i));
1739
+ do {int _=perm[i]; perm[i]=perm[j]; perm[j]=_;} while(false);
1740
+ }
1741
+ for(i=0;i<nr_fold;i++)
1742
+ {
1743
+ int begin = i*prob.l/nr_fold;
1744
+ int end = (i+1)*prob.l/nr_fold;
1745
+ int j,k;
1746
+ svm_problem subprob = new svm_problem();
1747
+
1748
+ subprob.l = prob.l-(end-begin);
1749
+ subprob.x = new svm_node[subprob.l][];
1750
+ subprob.y = new double[subprob.l];
1751
+
1752
+ k=0;
1753
+ for(j=0;j<begin;j++)
1754
+ {
1755
+ subprob.x[k] = prob.x[perm[j]];
1756
+ subprob.y[k] = prob.y[perm[j]];
1757
+ ++k;
1758
+ }
1759
+ for(j=end;j<prob.l;j++)
1760
+ {
1761
+ subprob.x[k] = prob.x[perm[j]];
1762
+ subprob.y[k] = prob.y[perm[j]];
1763
+ ++k;
1764
+ }
1765
+ int p_count=0,n_count=0;
1766
+ for(j=0;j<k;j++)
1767
+ if(subprob.y[j]>0)
1768
+ p_count++;
1769
+ else
1770
+ n_count++;
1771
+
1772
+ if(p_count==0 && n_count==0)
1773
+ for(j=begin;j<end;j++)
1774
+ dec_values[perm[j]] = 0;
1775
+ else if(p_count > 0 && n_count == 0)
1776
+ for(j=begin;j<end;j++)
1777
+ dec_values[perm[j]] = 1;
1778
+ else if(p_count == 0 && n_count > 0)
1779
+ for(j=begin;j<end;j++)
1780
+ dec_values[perm[j]] = -1;
1781
+ else
1782
+ {
1783
+ svm_parameter subparam = (svm_parameter)param.clone();
1784
+ subparam.probability=0;
1785
+ subparam.C=1.0;
1786
+ subparam.nr_weight=2;
1787
+ subparam.weight_label = new int[2];
1788
+ subparam.weight = new double[2];
1789
+ subparam.weight_label[0]=+1;
1790
+ subparam.weight_label[1]=-1;
1791
+ subparam.weight[0]=Cp;
1792
+ subparam.weight[1]=Cn;
1793
+ svm_model submodel = svm_train(subprob,subparam);
1794
+ for(j=begin;j<end;j++)
1795
+ {
1796
+ double[] dec_value=new double[1];
1797
+ svm_predict_values(submodel,prob.x[perm[j]],dec_value);
1798
+ dec_values[perm[j]]=dec_value[0];
1799
+ // ensure +1 -1 order; reason not using CV subroutine
1800
+ dec_values[perm[j]] *= submodel.label[0];
1801
+ }
1802
+ }
1803
+ }
1804
+ sigmoid_train(prob.l,dec_values,prob.y,probAB);
1805
+ }
1806
+
1807
+ // Return parameter of a Laplace distribution
1808
// Return parameter of a Laplace distribution
// (sigma of the residual noise model), estimated from 5-fold CV
// residuals with outliers beyond 5*std excluded from the mean.
private static double svm_svr_probability(svm_problem prob, svm_parameter param)
{
	int i;
	int nr_fold = 5;
	double[] ymv = new double[prob.l];
	double mae = 0;

	// CV with probability disabled to avoid recursion into this routine.
	svm_parameter newparam = (svm_parameter)param.clone();
	newparam.probability = 0;
	svm_cross_validation(prob,newparam,nr_fold,ymv);
	for(i=0;i<prob.l;i++)
	{
		ymv[i]=prob.y[i]-ymv[i];	// residual
		mae += Math.abs(ymv[i]);
	}
	mae /= prob.l;
	double std=Math.sqrt(2*mae*mae);
	int count=0;
	mae=0;
	// Recompute MAE excluding gross outliers.
	for(i=0;i<prob.l;i++)
		if (Math.abs(ymv[i]) > 5*std)
			count=count+1;
		else
			mae+=Math.abs(ymv[i]);
	mae /= (prob.l-count);
	svm.info("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma="+mae+"\n");
	return mae;
}
1836
+
1837
+ // label: label name, start: begin of each class, count: #data of classes, perm: indices to the original data
1838
+ // perm, length l, must be allocated before calling this subroutine
1839
// label: label name, start: begin of each class, count: #data of classes, perm: indices to the original data
// perm, length l, must be allocated before calling this subroutine
// Results are passed back through the single-element "out" arrays
// nr_class_ret/label_ret/start_ret/count_ret.
private static void svm_group_classes(svm_problem prob, int[] nr_class_ret, int[][] label_ret, int[][] start_ret, int[][] count_ret, int[] perm)
{
	int l = prob.l;
	int max_nr_class = 16;
	int nr_class = 0;
	int[] label = new int[max_nr_class];
	int[] count = new int[max_nr_class];
	int[] data_label = new int[l];
	int i;

	for(i=0;i<l;i++)
	{
		int this_label = (int)(prob.y[i]);
		int j;
		for(j=0;j<nr_class;j++)
		{
			if(this_label == label[j])
			{
				++count[j];
				break;
			}
		}
		data_label[i] = j;
		if(j == nr_class)	// unseen label: register it
		{
			if(nr_class == max_nr_class)
			{
				// grow label/count by doubling
				max_nr_class *= 2;
				int[] new_data = new int[max_nr_class];
				System.arraycopy(label,0,new_data,0,label.length);
				label = new_data;
				new_data = new int[max_nr_class];
				System.arraycopy(count,0,new_data,0,count.length);
				count = new_data;
			}
			label[nr_class] = this_label;
			count[nr_class] = 1;
			++nr_class;
		}
	}

	// Counting-sort the indices into perm, grouped by class.
	int[] start = new int[nr_class];
	start[0] = 0;
	for(i=1;i<nr_class;i++)
		start[i] = start[i-1]+count[i-1];
	for(i=0;i<l;i++)
	{
		perm[start[data_label[i]]] = i;
		++start[data_label[i]];
	}
	// start[] was consumed as a cursor above; rebuild it.
	start[0] = 0;
	for(i=1;i<nr_class;i++)
		start[i] = start[i-1]+count[i-1];

	nr_class_ret[0] = nr_class;
	label_ret[0] = label;
	start_ret[0] = start;
	count_ret[0] = count;
}
1898
+
1899
+ //
1900
+ // Interface functions
1901
+ //
1902
// Train a model for prob under param.  Regression/one-class trains a
// single function; classification trains all k*(k-1)/2 pairwise binary
// classifiers (one-vs-one) and packs their support vectors/coefficients
// into the returned model.
public static svm_model svm_train(svm_problem prob, svm_parameter param)
{
	svm_model model = new svm_model();
	model.param = param;

	if(param.svm_type == svm_parameter.ONE_CLASS ||
	   param.svm_type == svm_parameter.EPSILON_SVR ||
	   param.svm_type == svm_parameter.NU_SVR)
	{
		// regression or one-class-svm
		model.nr_class = 2;
		model.label = null;
		model.nSV = null;
		model.probA = null; model.probB = null;
		model.sv_coef = new double[1][];

		if(param.probability == 1 &&
		   (param.svm_type == svm_parameter.EPSILON_SVR ||
		    param.svm_type == svm_parameter.NU_SVR))
		{
			// Laplace noise parameter for SVR probability output.
			model.probA = new double[1];
			model.probA[0] = svm_svr_probability(prob,param);
		}

		decision_function f = svm_train_one(prob,param,0,0);
		model.rho = new double[1];
		model.rho[0] = f.rho;

		// Keep only the support vectors (alpha != 0).
		int nSV = 0;
		int i;
		for(i=0;i<prob.l;i++)
			if(Math.abs(f.alpha[i]) > 0) ++nSV;
		model.l = nSV;
		model.SV = new svm_node[nSV][];
		model.sv_coef[0] = new double[nSV];
		int j = 0;
		for(i=0;i<prob.l;i++)
			if(Math.abs(f.alpha[i]) > 0)
			{
				model.SV[j] = prob.x[i];
				model.sv_coef[0][j] = f.alpha[i];
				++j;
			}
	}
	else
	{
		// classification
		int l = prob.l;
		int[] tmp_nr_class = new int[1];
		int[][] tmp_label = new int[1][];
		int[][] tmp_start = new int[1][];
		int[][] tmp_count = new int[1][];
		int[] perm = new int[l];

		// group training data of the same class
		svm_group_classes(prob,tmp_nr_class,tmp_label,tmp_start,tmp_count,perm);
		int nr_class = tmp_nr_class[0];
		int[] label = tmp_label[0];
		int[] start = tmp_start[0];
		int[] count = tmp_count[0];
		svm_node[][] x = new svm_node[l][];
		int i;
		for(i=0;i<l;i++)
			x[i] = prob.x[perm[i]];

		// calculate weighted C

		double[] weighted_C = new double[nr_class];
		for(i=0;i<nr_class;i++)
			weighted_C[i] = param.C;
		for(i=0;i<param.nr_weight;i++)
		{
			int j;
			for(j=0;j<nr_class;j++)
				if(param.weight_label[i] == label[j])
					break;
			if(j == nr_class)
				System.err.print("warning: class label "+param.weight_label[i]+" specified in weight is not found\n");
			else
				weighted_C[j] *= param.weight[i];
		}

		// train k*(k-1)/2 models

		boolean[] nonzero = new boolean[l];
		for(i=0;i<l;i++)
			nonzero[i] = false;
		decision_function[] f = new decision_function[nr_class*(nr_class-1)/2];

		double[] probA=null,probB=null;
		if (param.probability == 1)
		{
			probA=new double[nr_class*(nr_class-1)/2];
			probB=new double[nr_class*(nr_class-1)/2];
		}

		int p = 0;
		for(i=0;i<nr_class;i++)
			for(int j=i+1;j<nr_class;j++)
			{
				// Build the class-i (+1) vs class-j (-1) sub-problem.
				svm_problem sub_prob = new svm_problem();
				int si = start[i], sj = start[j];
				int ci = count[i], cj = count[j];
				sub_prob.l = ci+cj;
				sub_prob.x = new svm_node[sub_prob.l][];
				sub_prob.y = new double[sub_prob.l];
				int k;
				for(k=0;k<ci;k++)
				{
					sub_prob.x[k] = x[si+k];
					sub_prob.y[k] = +1;
				}
				for(k=0;k<cj;k++)
				{
					sub_prob.x[ci+k] = x[sj+k];
					sub_prob.y[ci+k] = -1;
				}

				if(param.probability == 1)
				{
					double[] probAB=new double[2];
					svm_binary_svc_probability(sub_prob,param,weighted_C[i],weighted_C[j],probAB);
					probA[p]=probAB[0];
					probB[p]=probAB[1];
				}

				f[p] = svm_train_one(sub_prob,param,weighted_C[i],weighted_C[j]);
				// Mark points that are SVs in any pairwise classifier.
				for(k=0;k<ci;k++)
					if(!nonzero[si+k] && Math.abs(f[p].alpha[k]) > 0)
						nonzero[si+k] = true;
				for(k=0;k<cj;k++)
					if(!nonzero[sj+k] && Math.abs(f[p].alpha[ci+k]) > 0)
						nonzero[sj+k] = true;
				++p;
			}

		// build output

		model.nr_class = nr_class;

		model.label = new int[nr_class];
		for(i=0;i<nr_class;i++)
			model.label[i] = label[i];

		model.rho = new double[nr_class*(nr_class-1)/2];
		for(i=0;i<nr_class*(nr_class-1)/2;i++)
			model.rho[i] = f[i].rho;

		if(param.probability == 1)
		{
			model.probA = new double[nr_class*(nr_class-1)/2];
			model.probB = new double[nr_class*(nr_class-1)/2];
			for(i=0;i<nr_class*(nr_class-1)/2;i++)
			{
				model.probA[i] = probA[i];
				model.probB[i] = probB[i];
			}
		}
		else
		{
			model.probA=null;
			model.probB=null;
		}

		// Count SVs per class and overall.
		int nnz = 0;
		int[] nz_count = new int[nr_class];
		model.nSV = new int[nr_class];
		for(i=0;i<nr_class;i++)
		{
			int nSV = 0;
			for(int j=0;j<count[i];j++)
				if(nonzero[start[i]+j])
				{
					++nSV;
					++nnz;
				}
			model.nSV[i] = nSV;
			nz_count[i] = nSV;
		}

		svm.info("Total nSV = "+nnz+"\n");

		model.l = nnz;
		model.SV = new svm_node[nnz][];
		p = 0;
		for(i=0;i<l;i++)
			if(nonzero[i]) model.SV[p++] = x[i];

		int[] nz_start = new int[nr_class];
		nz_start[0] = 0;
		for(i=1;i<nr_class;i++)
			nz_start[i] = nz_start[i-1]+nz_count[i-1];

		model.sv_coef = new double[nr_class-1][];
		for(i=0;i<nr_class-1;i++)
			model.sv_coef[i] = new double[nnz];

		p = 0;
		for(i=0;i<nr_class;i++)
			for(int j=i+1;j<nr_class;j++)
			{
				// classifier (i,j): coefficients with
				// i are in sv_coef[j-1][nz_start[i]...],
				// j are in sv_coef[i][nz_start[j]...]

				int si = start[i];
				int sj = start[j];
				int ci = count[i];
				int cj = count[j];

				int q = nz_start[i];
				int k;
				for(k=0;k<ci;k++)
					if(nonzero[si+k])
						model.sv_coef[j-1][q++] = f[p].alpha[k];
				q = nz_start[j];
				for(k=0;k<cj;k++)
					if(nonzero[sj+k])
						model.sv_coef[i][q++] = f[p].alpha[ci+k];
				++p;
			}
	}
	return model;
}
2126
+
2127
+ // Stratified cross validation
2128
+ public static void svm_cross_validation(svm_problem prob, svm_parameter param, int nr_fold, double[] target)
2129
+ {
2130
+ int i;
2131
+ int[] fold_start = new int[nr_fold+1];
2132
+ int l = prob.l;
2133
+ int[] perm = new int[l];
2134
+
2135
+ // stratified cv may not give leave-one-out rate
2136
+ // Each class to l folds -> some folds may have zero elements
2137
+ if((param.svm_type == svm_parameter.C_SVC ||
2138
+ param.svm_type == svm_parameter.NU_SVC) && nr_fold < l)
2139
+ {
2140
+ int[] tmp_nr_class = new int[1];
2141
+ int[][] tmp_label = new int[1][];
2142
+ int[][] tmp_start = new int[1][];
2143
+ int[][] tmp_count = new int[1][];
2144
+
2145
+ svm_group_classes(prob,tmp_nr_class,tmp_label,tmp_start,tmp_count,perm);
2146
+
2147
+ int nr_class = tmp_nr_class[0];
2148
+ int[] start = tmp_start[0];
2149
+ int[] count = tmp_count[0];
2150
+
2151
+ // random shuffle and then data grouped by fold using the array perm
2152
+ int[] fold_count = new int[nr_fold];
2153
+ int c;
2154
+ int[] index = new int[l];
2155
+ for(i=0;i<l;i++)
2156
+ index[i]=perm[i];
2157
+ for (c=0; c<nr_class; c++)
2158
+ for(i=0;i<count[c];i++)
2159
+ {
2160
+ int j = i+(int)(Math.random()*(count[c]-i));
2161
+ do {int _=index[start[c]+j]; index[start[c]+j]=index[start[c]+i]; index[start[c]+i]=_;} while(false);
2162
+ }
2163
+ for(i=0;i<nr_fold;i++)
2164
+ {
2165
+ fold_count[i] = 0;
2166
+ for (c=0; c<nr_class;c++)
2167
+ fold_count[i]+=(i+1)*count[c]/nr_fold-i*count[c]/nr_fold;
2168
+ }
2169
+ fold_start[0]=0;
2170
+ for (i=1;i<=nr_fold;i++)
2171
+ fold_start[i] = fold_start[i-1]+fold_count[i-1];
2172
+ for (c=0; c<nr_class;c++)
2173
+ for(i=0;i<nr_fold;i++)
2174
+ {
2175
+ int begin = start[c]+i*count[c]/nr_fold;
2176
+ int end = start[c]+(i+1)*count[c]/nr_fold;
2177
+ for(int j=begin;j<end;j++)
2178
+ {
2179
+ perm[fold_start[i]] = index[j];
2180
+ fold_start[i]++;
2181
+ }
2182
+ }
2183
+ fold_start[0]=0;
2184
+ for (i=1;i<=nr_fold;i++)
2185
+ fold_start[i] = fold_start[i-1]+fold_count[i-1];
2186
+ }
2187
+ else
2188
+ {
2189
+ for(i=0;i<l;i++) perm[i]=i;
2190
+ for(i=0;i<l;i++)
2191
+ {
2192
+ int j = i+(int)(Math.random()*(l-i));
2193
+ do {int _=perm[i]; perm[i]=perm[j]; perm[j]=_;} while(false);
2194
+ }
2195
+ for(i=0;i<=nr_fold;i++)
2196
+ fold_start[i]=i*l/nr_fold;
2197
+ }
2198
+
2199
+ for(i=0;i<nr_fold;i++)
2200
+ {
2201
+ int begin = fold_start[i];
2202
+ int end = fold_start[i+1];
2203
+ int j,k;
2204
+ svm_problem subprob = new svm_problem();
2205
+
2206
+ subprob.l = l-(end-begin);
2207
+ subprob.x = new svm_node[subprob.l][];
2208
+ subprob.y = new double[subprob.l];
2209
+
2210
+ k=0;
2211
+ for(j=0;j<begin;j++)
2212
+ {
2213
+ subprob.x[k] = prob.x[perm[j]];
2214
+ subprob.y[k] = prob.y[perm[j]];
2215
+ ++k;
2216
+ }
2217
+ for(j=end;j<l;j++)
2218
+ {
2219
+ subprob.x[k] = prob.x[perm[j]];
2220
+ subprob.y[k] = prob.y[perm[j]];
2221
+ ++k;
2222
+ }
2223
+ svm_model submodel = svm_train(subprob,param);
2224
+ if(param.probability==1 &&
2225
+ (param.svm_type == svm_parameter.C_SVC ||
2226
+ param.svm_type == svm_parameter.NU_SVC))
2227
+ {
2228
+ double[] prob_estimates= new double[svm_get_nr_class(submodel)];
2229
+ for(j=begin;j<end;j++)
2230
+ target[perm[j]] = svm_predict_probability(submodel,prob.x[perm[j]],prob_estimates);
2231
+ }
2232
+ else
2233
+ for(j=begin;j<end;j++)
2234
+ target[perm[j]] = svm_predict(submodel,prob.x[perm[j]]);
2235
+ }
2236
+ }
2237
+
2238
// Accessor: the svm_type constant this model was trained with.
public static int svm_get_svm_type(svm_model model)
{
	return model.param.svm_type;
}
2242
+
2243
// Accessor: number of classes (2 for regression/one-class models).
public static int svm_get_nr_class(svm_model model)
{
	return model.nr_class;
}
2247
+
2248
+ public static void svm_get_labels(svm_model model, int[] label)
2249
+ {
2250
+ if (model.label != null)
2251
+ for(int i=0;i<model.nr_class;i++)
2252
+ label[i] = model.label[i];
2253
+ }
2254
+
2255
+ public static double svm_get_svr_probability(svm_model model)
2256
+ {
2257
+ if ((model.param.svm_type == svm_parameter.EPSILON_SVR || model.param.svm_type == svm_parameter.NU_SVR) &&
2258
+ model.probA!=null)
2259
+ return model.probA[0];
2260
+ else
2261
+ {
2262
+ System.err.print("Model doesn't contain information for SVR probability inference\n");
2263
+ return 0;
2264
+ }
2265
+ }
2266
+
2267
// Compute the raw decision value(s) for x and return the prediction.
// For one-class/regression, dec_values[0] receives the single decision
// value; for classification, dec_values receives all k*(k-1)/2 pairwise
// values and the majority-vote label is returned.
public static double svm_predict_values(svm_model model, svm_node[] x, double[] dec_values)
{
	if(model.param.svm_type == svm_parameter.ONE_CLASS ||
	   model.param.svm_type == svm_parameter.EPSILON_SVR ||
	   model.param.svm_type == svm_parameter.NU_SVR)
	{
		double[] sv_coef = model.sv_coef[0];
		double sum = 0;
		for(int i=0;i<model.l;i++)
			sum += sv_coef[i] * Kernel.k_function(x,model.SV[i],model.param);
		sum -= model.rho[0];
		dec_values[0] = sum;

		if(model.param.svm_type == svm_parameter.ONE_CLASS)
			return (sum>0)?1:-1;
		else
			return sum;
	}
	else
	{
		int i;
		int nr_class = model.nr_class;
		int l = model.l;

		// Kernel values against every SV are shared by all pairwise
		// classifiers, so compute them once.
		double[] kvalue = new double[l];
		for(i=0;i<l;i++)
			kvalue[i] = Kernel.k_function(x,model.SV[i],model.param);

		int[] start = new int[nr_class];
		start[0] = 0;
		for(i=1;i<nr_class;i++)
			start[i] = start[i-1]+model.nSV[i-1];

		int[] vote = new int[nr_class];
		for(i=0;i<nr_class;i++)
			vote[i] = 0;

		int p=0;
		for(i=0;i<nr_class;i++)
			for(int j=i+1;j<nr_class;j++)
			{
				double sum = 0;
				int si = start[i];
				int sj = start[j];
				int ci = model.nSV[i];
				int cj = model.nSV[j];

				int k;
				// coefficients for the (i,j) classifier — see the
				// sv_coef layout comment in svm_train.
				double[] coef1 = model.sv_coef[j-1];
				double[] coef2 = model.sv_coef[i];
				for(k=0;k<ci;k++)
					sum += coef1[si+k] * kvalue[si+k];
				for(k=0;k<cj;k++)
					sum += coef2[sj+k] * kvalue[sj+k];
				sum -= model.rho[p];
				dec_values[p] = sum;

				if(dec_values[p] > 0)
					++vote[i];
				else
					++vote[j];
				p++;
			}

		// Majority vote decides the predicted label.
		int vote_max_idx = 0;
		for(i=1;i<nr_class;i++)
			if(vote[i] > vote[vote_max_idx])
				vote_max_idx = i;

		return model.label[vote_max_idx];
	}
}
2339
+
2340
+ public static double svm_predict(svm_model model, svm_node[] x)
2341
+ {
2342
+ int nr_class = model.nr_class;
2343
+ double[] dec_values;
2344
+ if(model.param.svm_type == svm_parameter.ONE_CLASS ||
2345
+ model.param.svm_type == svm_parameter.EPSILON_SVR ||
2346
+ model.param.svm_type == svm_parameter.NU_SVR)
2347
+ dec_values = new double[1];
2348
+ else
2349
+ dec_values = new double[nr_class*(nr_class-1)/2];
2350
+ double pred_result = svm_predict_values(model, x, dec_values);
2351
+ return pred_result;
2352
+ }
2353
+
2354
// Predict with per-class probability estimates (written to
// prob_estimates, indexed like model.label).  Requires a C-SVC/nu-SVC
// model trained with probability==1; otherwise falls back to plain
// svm_predict and leaves prob_estimates untouched.
public static double svm_predict_probability(svm_model model, svm_node[] x, double[] prob_estimates)
{
	if ((model.param.svm_type == svm_parameter.C_SVC || model.param.svm_type == svm_parameter.NU_SVC) &&
	    model.probA!=null && model.probB!=null)
	{
		int i;
		int nr_class = model.nr_class;
		double[] dec_values = new double[nr_class*(nr_class-1)/2];
		svm_predict_values(model, x, dec_values);

		// Clamp pairwise sigmoid outputs away from 0/1 for stability.
		double min_prob=1e-7;
		double[][] pairwise_prob=new double[nr_class][nr_class];

		int k=0;
		for(i=0;i<nr_class;i++)
			for(int j=i+1;j<nr_class;j++)
			{
				pairwise_prob[i][j]=Math.min(Math.max(sigmoid_predict(dec_values[k],model.probA[k],model.probB[k]),min_prob),1-min_prob);
				pairwise_prob[j][i]=1-pairwise_prob[i][j];
				k++;
			}
		multiclass_probability(nr_class,pairwise_prob,prob_estimates);

		// Return the label with the highest estimated probability.
		int prob_max_idx = 0;
		for(i=1;i<nr_class;i++)
			if(prob_estimates[i] > prob_estimates[prob_max_idx])
				prob_max_idx = i;
		return model.label[prob_max_idx];
	}
	else
		return svm_predict(model, x);
}
2386
+
2387
// Textual names written to / read from model files; array order must
// match the corresponding svm_parameter.* integer constants.
static final String svm_type_table[] =
{
	"c_svc","nu_svc","one_class","epsilon_svr","nu_svr",
};

static final String kernel_type_table[]=
{
	"linear","polynomial","rbf","sigmoid","precomputed"
};
2396
+
2397
+ public static void svm_save_model(String model_file_name, svm_model model) throws IOException
2398
+ {
2399
+ DataOutputStream fp = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(model_file_name)));
2400
+
2401
+ svm_parameter param = model.param;
2402
+
2403
+ fp.writeBytes("svm_type "+svm_type_table[param.svm_type]+"\n");
2404
+ fp.writeBytes("kernel_type "+kernel_type_table[param.kernel_type]+"\n");
2405
+
2406
+ if(param.kernel_type == svm_parameter.POLY)
2407
+ fp.writeBytes("degree "+param.degree+"\n");
2408
+
2409
+ if(param.kernel_type == svm_parameter.POLY ||
2410
+ param.kernel_type == svm_parameter.RBF ||
2411
+ param.kernel_type == svm_parameter.SIGMOID)
2412
+ fp.writeBytes("gamma "+param.gamma+"\n");
2413
+
2414
+ if(param.kernel_type == svm_parameter.POLY ||
2415
+ param.kernel_type == svm_parameter.SIGMOID)
2416
+ fp.writeBytes("coef0 "+param.coef0+"\n");
2417
+
2418
+ int nr_class = model.nr_class;
2419
+ int l = model.l;
2420
+ fp.writeBytes("nr_class "+nr_class+"\n");
2421
+ fp.writeBytes("total_sv "+l+"\n");
2422
+
2423
+ {
2424
+ fp.writeBytes("rho");
2425
+ for(int i=0;i<nr_class*(nr_class-1)/2;i++)
2426
+ fp.writeBytes(" "+model.rho[i]);
2427
+ fp.writeBytes("\n");
2428
+ }
2429
+
2430
+ if(model.label != null)
2431
+ {
2432
+ fp.writeBytes("label");
2433
+ for(int i=0;i<nr_class;i++)
2434
+ fp.writeBytes(" "+model.label[i]);
2435
+ fp.writeBytes("\n");
2436
+ }
2437
+
2438
+ if(model.probA != null) // regression has probA only
2439
+ {
2440
+ fp.writeBytes("probA");
2441
+ for(int i=0;i<nr_class*(nr_class-1)/2;i++)
2442
+ fp.writeBytes(" "+model.probA[i]);
2443
+ fp.writeBytes("\n");
2444
+ }
2445
+ if(model.probB != null)
2446
+ {
2447
+ fp.writeBytes("probB");
2448
+ for(int i=0;i<nr_class*(nr_class-1)/2;i++)
2449
+ fp.writeBytes(" "+model.probB[i]);
2450
+ fp.writeBytes("\n");
2451
+ }
2452
+
2453
+ if(model.nSV != null)
2454
+ {
2455
+ fp.writeBytes("nr_sv");
2456
+ for(int i=0;i<nr_class;i++)
2457
+ fp.writeBytes(" "+model.nSV[i]);
2458
+ fp.writeBytes("\n");
2459
+ }
2460
+
2461
+ fp.writeBytes("SV\n");
2462
+ double[][] sv_coef = model.sv_coef;
2463
+ svm_node[][] SV = model.SV;
2464
+
2465
+ for(int i=0;i<l;i++)
2466
+ {
2467
+ for(int j=0;j<nr_class-1;j++)
2468
+ fp.writeBytes(sv_coef[j][i]+" ");
2469
+
2470
+ svm_node[] p = SV[i];
2471
+ if(param.kernel_type == svm_parameter.PRECOMPUTED)
2472
+ fp.writeBytes("0:"+(int)(p[0].value));
2473
+ else
2474
+ for(int j=0;j<p.length;j++)
2475
+ fp.writeBytes(p[j].index+":"+p[j].value+" ");
2476
+ fp.writeBytes("\n");
2477
+ }
2478
+
2479
+ fp.close();
2480
+ }
2481
+
2482
+ private static double atof(String s)
2483
+ {
2484
+ return Double.valueOf(s).doubleValue();
2485
+ }
2486
+
2487
	// Parses a string as a base-10 int (libsvm's C-style helper name).
	// Unlike C's atoi, this throws NumberFormatException on malformed
	// input instead of returning 0.
	private static int atoi(String s)
	{
		return Integer.parseInt(s);
	}
2491
+
2492
+ public static svm_model svm_load_model(String model_file_name) throws IOException
2493
+ {
2494
+ return svm_load_model(new BufferedReader(new FileReader(model_file_name)));
2495
+ }
2496
+
2497
+ public static svm_model svm_load_model(BufferedReader fp) throws IOException
2498
+ {
2499
+ // read parameters
2500
+
2501
+ svm_model model = new svm_model();
2502
+ svm_parameter param = new svm_parameter();
2503
+ model.param = param;
2504
+ model.rho = null;
2505
+ model.probA = null;
2506
+ model.probB = null;
2507
+ model.label = null;
2508
+ model.nSV = null;
2509
+
2510
+ while(true)
2511
+ {
2512
+ String cmd = fp.readLine();
2513
+ String arg = cmd.substring(cmd.indexOf(' ')+1);
2514
+
2515
+ if(cmd.startsWith("svm_type"))
2516
+ {
2517
+ int i;
2518
+ for(i=0;i<svm_type_table.length;i++)
2519
+ {
2520
+ if(arg.indexOf(svm_type_table[i])!=-1)
2521
+ {
2522
+ param.svm_type=i;
2523
+ break;
2524
+ }
2525
+ }
2526
+ if(i == svm_type_table.length)
2527
+ {
2528
+ System.err.print("unknown svm type.\n");
2529
+ return null;
2530
+ }
2531
+ }
2532
+ else if(cmd.startsWith("kernel_type"))
2533
+ {
2534
+ int i;
2535
+ for(i=0;i<kernel_type_table.length;i++)
2536
+ {
2537
+ if(arg.indexOf(kernel_type_table[i])!=-1)
2538
+ {
2539
+ param.kernel_type=i;
2540
+ break;
2541
+ }
2542
+ }
2543
+ if(i == kernel_type_table.length)
2544
+ {
2545
+ System.err.print("unknown kernel function.\n");
2546
+ return null;
2547
+ }
2548
+ }
2549
+ else if(cmd.startsWith("degree"))
2550
+ param.degree = atoi(arg);
2551
+ else if(cmd.startsWith("gamma"))
2552
+ param.gamma = atof(arg);
2553
+ else if(cmd.startsWith("coef0"))
2554
+ param.coef0 = atof(arg);
2555
+ else if(cmd.startsWith("nr_class"))
2556
+ model.nr_class = atoi(arg);
2557
+ else if(cmd.startsWith("total_sv"))
2558
+ model.l = atoi(arg);
2559
+ else if(cmd.startsWith("rho"))
2560
+ {
2561
+ int n = model.nr_class * (model.nr_class-1)/2;
2562
+ model.rho = new double[n];
2563
+ StringTokenizer st = new StringTokenizer(arg);
2564
+ for(int i=0;i<n;i++)
2565
+ model.rho[i] = atof(st.nextToken());
2566
+ }
2567
+ else if(cmd.startsWith("label"))
2568
+ {
2569
+ int n = model.nr_class;
2570
+ model.label = new int[n];
2571
+ StringTokenizer st = new StringTokenizer(arg);
2572
+ for(int i=0;i<n;i++)
2573
+ model.label[i] = atoi(st.nextToken());
2574
+ }
2575
+ else if(cmd.startsWith("probA"))
2576
+ {
2577
+ int n = model.nr_class*(model.nr_class-1)/2;
2578
+ model.probA = new double[n];
2579
+ StringTokenizer st = new StringTokenizer(arg);
2580
+ for(int i=0;i<n;i++)
2581
+ model.probA[i] = atof(st.nextToken());
2582
+ }
2583
+ else if(cmd.startsWith("probB"))
2584
+ {
2585
+ int n = model.nr_class*(model.nr_class-1)/2;
2586
+ model.probB = new double[n];
2587
+ StringTokenizer st = new StringTokenizer(arg);
2588
+ for(int i=0;i<n;i++)
2589
+ model.probB[i] = atof(st.nextToken());
2590
+ }
2591
+ else if(cmd.startsWith("nr_sv"))
2592
+ {
2593
+ int n = model.nr_class;
2594
+ model.nSV = new int[n];
2595
+ StringTokenizer st = new StringTokenizer(arg);
2596
+ for(int i=0;i<n;i++)
2597
+ model.nSV[i] = atoi(st.nextToken());
2598
+ }
2599
+ else if(cmd.startsWith("SV"))
2600
+ {
2601
+ break;
2602
+ }
2603
+ else
2604
+ {
2605
+ System.err.print("unknown text in model file: ["+cmd+"]\n");
2606
+ return null;
2607
+ }
2608
+ }
2609
+
2610
+ // read sv_coef and SV
2611
+
2612
+ int m = model.nr_class - 1;
2613
+ int l = model.l;
2614
+ model.sv_coef = new double[m][l];
2615
+ model.SV = new svm_node[l][];
2616
+
2617
+ for(int i=0;i<l;i++)
2618
+ {
2619
+ String line = fp.readLine();
2620
+ StringTokenizer st = new StringTokenizer(line," \t\n\r\f:");
2621
+
2622
+ for(int k=0;k<m;k++)
2623
+ model.sv_coef[k][i] = atof(st.nextToken());
2624
+ int n = st.countTokens()/2;
2625
+ model.SV[i] = new svm_node[n];
2626
+ for(int j=0;j<n;j++)
2627
+ {
2628
+ model.SV[i][j] = new svm_node();
2629
+ model.SV[i][j].index = atoi(st.nextToken());
2630
+ model.SV[i][j].value = atof(st.nextToken());
2631
+ }
2632
+ }
2633
+
2634
+ fp.close();
2635
+ return model;
2636
+ }
2637
+
2638
	/**
	 * Validates a training configuration before svm_train is called.
	 *
	 * Returns null if the (prob, param) pair is usable, otherwise an
	 * English error string describing the FIRST problem found — callers
	 * depend on these exact strings, and the check order below is part of
	 * the observable behavior, so it must not be rearranged.
	 *
	 * @param prob  the training data (only prob.l / prob.y are read here)
	 * @param param the parameters to validate
	 * @return null on success, otherwise an error message
	 */
	public static String svm_check_parameter(svm_problem prob, svm_parameter param)
	{
		// svm_type

		int svm_type = param.svm_type;
		if(svm_type != svm_parameter.C_SVC &&
		   svm_type != svm_parameter.NU_SVC &&
		   svm_type != svm_parameter.ONE_CLASS &&
		   svm_type != svm_parameter.EPSILON_SVR &&
		   svm_type != svm_parameter.NU_SVR)
			return "unknown svm type";

		// kernel_type, degree

		int kernel_type = param.kernel_type;
		if(kernel_type != svm_parameter.LINEAR &&
		   kernel_type != svm_parameter.POLY &&
		   kernel_type != svm_parameter.RBF &&
		   kernel_type != svm_parameter.SIGMOID &&
		   kernel_type != svm_parameter.PRECOMPUTED)
			return "unknown kernel type";

		if(param.gamma < 0)
			return "gamma < 0";

		if(param.degree < 0)
			return "degree of polynomial kernel < 0";

		// cache_size,eps,C,nu,p,shrinking

		if(param.cache_size <= 0)
			return "cache_size <= 0";

		if(param.eps <= 0)
			return "eps <= 0";

		// C is only meaningful for C-SVC and the two regression types
		if(svm_type == svm_parameter.C_SVC ||
		   svm_type == svm_parameter.EPSILON_SVR ||
		   svm_type == svm_parameter.NU_SVR)
			if(param.C <= 0)
				return "C <= 0";

		// nu must lie in (0, 1] for the nu-parameterized formulations
		if(svm_type == svm_parameter.NU_SVC ||
		   svm_type == svm_parameter.ONE_CLASS ||
		   svm_type == svm_parameter.NU_SVR)
			if(param.nu <= 0 || param.nu > 1)
				return "nu <= 0 or nu > 1";

		if(svm_type == svm_parameter.EPSILON_SVR)
			if(param.p < 0)
				return "p < 0";

		// shrinking/probability are boolean flags encoded as 0/1 ints
		if(param.shrinking != 0 &&
		   param.shrinking != 1)
			return "shrinking != 0 and shrinking != 1";

		if(param.probability != 0 &&
		   param.probability != 1)
			return "probability != 0 and probability != 1";

		if(param.probability == 1 &&
		   svm_type == svm_parameter.ONE_CLASS)
			return "one-class SVM probability output not supported yet";

		// check whether nu-svc is feasible

		if(svm_type == svm_parameter.NU_SVC)
		{
			// Count the members of each class in prob.y, growing the
			// label/count arrays by doubling when more than max_nr_class
			// distinct labels appear.
			int l = prob.l;
			int max_nr_class = 16;
			int nr_class = 0;
			int[] label = new int[max_nr_class];
			int[] count = new int[max_nr_class];

			int i;
			for(i=0;i<l;i++)
			{
				int this_label = (int)prob.y[i];
				int j;
				for(j=0;j<nr_class;j++)
					if(this_label == label[j])
					{
						++count[j];
						break;
					}

				if(j == nr_class)
				{
					// unseen label; grow storage if the tables are full
					if(nr_class == max_nr_class)
					{
						max_nr_class *= 2;
						int[] new_data = new int[max_nr_class];
						System.arraycopy(label,0,new_data,0,label.length);
						label = new_data;

						new_data = new int[max_nr_class];
						System.arraycopy(count,0,new_data,0,count.length);
						count = new_data;
					}
					label[nr_class] = this_label;
					count[nr_class] = 1;
					++nr_class;
				}
			}

			// nu-SVC is infeasible for a class pair when
			// nu*(n1+n2)/2 exceeds min(n1,n2)
			for(i=0;i<nr_class;i++)
			{
				int n1 = count[i];
				for(int j=i+1;j<nr_class;j++)
				{
					int n2 = count[j];
					if(param.nu*(n1+n2)/2 > Math.min(n1,n2))
						return "specified nu is infeasible";
				}
			}
		}

		return null;
	}
2757
+
2758
+ public static int svm_check_probability_model(svm_model model)
2759
+ {
2760
+ if (((model.param.svm_type == svm_parameter.C_SVC || model.param.svm_type == svm_parameter.NU_SVC) &&
2761
+ model.probA!=null && model.probB!=null) ||
2762
+ ((model.param.svm_type == svm_parameter.EPSILON_SVR || model.param.svm_type == svm_parameter.NU_SVR) &&
2763
+ model.probA!=null))
2764
+ return 1;
2765
+ else
2766
+ return 0;
2767
+ }
2768
+
2769
+ public static void svm_set_print_string_function(svm_print_interface print_func)
2770
+ {
2771
+ if (print_func == null)
2772
+ svm_print_string = svm_print_stdout;
2773
+ else
2774
+ svm_print_string = print_func;
2775
+ }
2776
+ }