opener-opinion-detector-basic 1.0.0

Files changed (168)
  1. checksums.yaml +7 -0
  2. data/README.md +30 -0
  3. data/bin/opinion-detector-basic +19 -0
  4. data/bin/opinion-detector-basic-server +10 -0
  5. data/config.ru +4 -0
  6. data/core/opinion_detector_basic_multi.py +499 -0
  7. data/core/packages/KafNafParser-1.3.tar.gz +0 -0
  8. data/core/packages/VUA_pylib-1.4.tar.gz +0 -0
  9. data/core/site-packages/pre_build/VUKafParserPy-1.0-py2.7.egg-info/PKG-INFO +10 -0
  10. data/core/site-packages/pre_build/VUKafParserPy-1.0-py2.7.egg-info/SOURCES.txt +7 -0
  11. data/core/site-packages/pre_build/VUKafParserPy-1.0-py2.7.egg-info/dependency_links.txt +1 -0
  12. data/core/site-packages/pre_build/VUKafParserPy-1.0-py2.7.egg-info/installed-files.txt +11 -0
  13. data/core/site-packages/pre_build/VUKafParserPy-1.0-py2.7.egg-info/top_level.txt +1 -0
  14. data/core/site-packages/pre_build/VUKafParserPy/KafDataObjectsMod.py +165 -0
  15. data/core/site-packages/pre_build/VUKafParserPy/KafDataObjectsMod.pyc +0 -0
  16. data/core/site-packages/pre_build/VUKafParserPy/KafParserMod.py +439 -0
  17. data/core/site-packages/pre_build/VUKafParserPy/KafParserMod.pyc +0 -0
  18. data/core/site-packages/pre_build/VUKafParserPy/__init__.py +7 -0
  19. data/core/site-packages/pre_build/VUKafParserPy/__init__.pyc +0 -0
  20. data/core/vendor/src/crfsuite/AUTHORS +1 -0
  21. data/core/vendor/src/crfsuite/COPYING +27 -0
  22. data/core/vendor/src/crfsuite/ChangeLog +103 -0
  23. data/core/vendor/src/crfsuite/INSTALL +236 -0
  24. data/core/vendor/src/crfsuite/Makefile.am +19 -0
  25. data/core/vendor/src/crfsuite/Makefile.in +783 -0
  26. data/core/vendor/src/crfsuite/README +183 -0
  27. data/core/vendor/src/crfsuite/aclocal.m4 +9018 -0
  28. data/core/vendor/src/crfsuite/autogen.sh +38 -0
  29. data/core/vendor/src/crfsuite/compile +143 -0
  30. data/core/vendor/src/crfsuite/config.guess +1502 -0
  31. data/core/vendor/src/crfsuite/config.h.in +198 -0
  32. data/core/vendor/src/crfsuite/config.sub +1714 -0
  33. data/core/vendor/src/crfsuite/configure +14273 -0
  34. data/core/vendor/src/crfsuite/configure.in +149 -0
  35. data/core/vendor/src/crfsuite/crfsuite.sln +42 -0
  36. data/core/vendor/src/crfsuite/depcomp +630 -0
  37. data/core/vendor/src/crfsuite/example/chunking.py +49 -0
  38. data/core/vendor/src/crfsuite/example/crfutils.py +179 -0
  39. data/core/vendor/src/crfsuite/example/ner.py +270 -0
  40. data/core/vendor/src/crfsuite/example/pos.py +78 -0
  41. data/core/vendor/src/crfsuite/example/template.py +88 -0
  42. data/core/vendor/src/crfsuite/frontend/Makefile.am +29 -0
  43. data/core/vendor/src/crfsuite/frontend/Makefile.in +640 -0
  44. data/core/vendor/src/crfsuite/frontend/dump.c +116 -0
  45. data/core/vendor/src/crfsuite/frontend/frontend.vcxproj +129 -0
  46. data/core/vendor/src/crfsuite/frontend/iwa.c +273 -0
  47. data/core/vendor/src/crfsuite/frontend/iwa.h +65 -0
  48. data/core/vendor/src/crfsuite/frontend/learn.c +439 -0
  49. data/core/vendor/src/crfsuite/frontend/main.c +137 -0
  50. data/core/vendor/src/crfsuite/frontend/option.c +93 -0
  51. data/core/vendor/src/crfsuite/frontend/option.h +86 -0
  52. data/core/vendor/src/crfsuite/frontend/readdata.h +38 -0
  53. data/core/vendor/src/crfsuite/frontend/reader.c +136 -0
  54. data/core/vendor/src/crfsuite/frontend/tag.c +427 -0
  55. data/core/vendor/src/crfsuite/genbinary.sh.in +15 -0
  56. data/core/vendor/src/crfsuite/include/Makefile.am +11 -0
  57. data/core/vendor/src/crfsuite/include/Makefile.in +461 -0
  58. data/core/vendor/src/crfsuite/include/crfsuite.h +1063 -0
  59. data/core/vendor/src/crfsuite/include/crfsuite.hpp +555 -0
  60. data/core/vendor/src/crfsuite/include/crfsuite_api.hpp +400 -0
  61. data/core/vendor/src/crfsuite/include/os.h +61 -0
  62. data/core/vendor/src/crfsuite/install-sh +520 -0
  63. data/core/vendor/src/crfsuite/lib/cqdb/COPYING +28 -0
  64. data/core/vendor/src/crfsuite/lib/cqdb/Makefile.am +21 -0
  65. data/core/vendor/src/crfsuite/lib/cqdb/Makefile.in +549 -0
  66. data/core/vendor/src/crfsuite/lib/cqdb/cqdb.vcxproj +86 -0
  67. data/core/vendor/src/crfsuite/lib/cqdb/include/cqdb.h +524 -0
  68. data/core/vendor/src/crfsuite/lib/cqdb/src/cqdb.c +587 -0
  69. data/core/vendor/src/crfsuite/lib/cqdb/src/lookup3.c +976 -0
  70. data/core/vendor/src/crfsuite/lib/crf/Makefile.am +46 -0
  71. data/core/vendor/src/crfsuite/lib/crf/Makefile.in +721 -0
  72. data/core/vendor/src/crfsuite/lib/crf/crf.vcxproj +216 -0
  73. data/core/vendor/src/crfsuite/lib/crf/src/crf1d.h +353 -0
  74. data/core/vendor/src/crfsuite/lib/crf/src/crf1d_context.c +705 -0
  75. data/core/vendor/src/crfsuite/lib/crf/src/crf1d_encode.c +943 -0
  76. data/core/vendor/src/crfsuite/lib/crf/src/crf1d_feature.c +352 -0
  77. data/core/vendor/src/crfsuite/lib/crf/src/crf1d_model.c +994 -0
  78. data/core/vendor/src/crfsuite/lib/crf/src/crf1d_tag.c +550 -0
  79. data/core/vendor/src/crfsuite/lib/crf/src/crfsuite.c +492 -0
  80. data/core/vendor/src/crfsuite/lib/crf/src/crfsuite_internal.h +236 -0
  81. data/core/vendor/src/crfsuite/lib/crf/src/crfsuite_train.c +272 -0
  82. data/core/vendor/src/crfsuite/lib/crf/src/dataset.c +106 -0
  83. data/core/vendor/src/crfsuite/lib/crf/src/dictionary.c +118 -0
  84. data/core/vendor/src/crfsuite/lib/crf/src/holdout.c +80 -0
  85. data/core/vendor/src/crfsuite/lib/crf/src/logging.c +91 -0
  86. data/core/vendor/src/crfsuite/lib/crf/src/logging.h +48 -0
  87. data/core/vendor/src/crfsuite/lib/crf/src/params.c +335 -0
  88. data/core/vendor/src/crfsuite/lib/crf/src/params.h +80 -0
  89. data/core/vendor/src/crfsuite/lib/crf/src/quark.c +172 -0
  90. data/core/vendor/src/crfsuite/lib/crf/src/quark.h +46 -0
  91. data/core/vendor/src/crfsuite/lib/crf/src/rumavl.c +1107 -0
  92. data/core/vendor/src/crfsuite/lib/crf/src/rumavl.h +160 -0
  93. data/core/vendor/src/crfsuite/lib/crf/src/train_arow.c +408 -0
  94. data/core/vendor/src/crfsuite/lib/crf/src/train_averaged_perceptron.c +242 -0
  95. data/core/vendor/src/crfsuite/lib/crf/src/train_l2sgd.c +507 -0
  96. data/core/vendor/src/crfsuite/lib/crf/src/train_lbfgs.c +338 -0
  97. data/core/vendor/src/crfsuite/lib/crf/src/train_passive_aggressive.c +435 -0
  98. data/core/vendor/src/crfsuite/lib/crf/src/vecmath.h +341 -0
  99. data/core/vendor/src/crfsuite/ltmain.sh +8413 -0
  100. data/core/vendor/src/crfsuite/missing +376 -0
  101. data/core/vendor/src/crfsuite/swig/Makefile.am +13 -0
  102. data/core/vendor/src/crfsuite/swig/Makefile.in +365 -0
  103. data/core/vendor/src/crfsuite/swig/crfsuite.cpp +2 -0
  104. data/core/vendor/src/crfsuite/swig/export.i +32 -0
  105. data/core/vendor/src/crfsuite/swig/python/README +92 -0
  106. data/core/vendor/src/crfsuite/swig/python/crfsuite.py +329 -0
  107. data/core/vendor/src/crfsuite/swig/python/export_wrap.cpp +14355 -0
  108. data/core/vendor/src/crfsuite/swig/python/export_wrap.h +63 -0
  109. data/core/vendor/src/crfsuite/swig/python/prepare.sh +9 -0
  110. data/core/vendor/src/crfsuite/swig/python/sample_tag.py +52 -0
  111. data/core/vendor/src/crfsuite/swig/python/sample_train.py +68 -0
  112. data/core/vendor/src/crfsuite/swig/python/setup.py +44 -0
  113. data/core/vendor/src/crfsuite/win32/stdint.h +679 -0
  114. data/core/vendor/src/liblbfgs/AUTHORS +1 -0
  115. data/core/vendor/src/liblbfgs/COPYING +22 -0
  116. data/core/vendor/src/liblbfgs/ChangeLog +120 -0
  117. data/core/vendor/src/liblbfgs/INSTALL +231 -0
  118. data/core/vendor/src/liblbfgs/Makefile.am +10 -0
  119. data/core/vendor/src/liblbfgs/Makefile.in +638 -0
  120. data/core/vendor/src/liblbfgs/NEWS +0 -0
  121. data/core/vendor/src/liblbfgs/README +71 -0
  122. data/core/vendor/src/liblbfgs/aclocal.m4 +6985 -0
  123. data/core/vendor/src/liblbfgs/autogen.sh +38 -0
  124. data/core/vendor/src/liblbfgs/config.guess +1411 -0
  125. data/core/vendor/src/liblbfgs/config.h.in +64 -0
  126. data/core/vendor/src/liblbfgs/config.sub +1500 -0
  127. data/core/vendor/src/liblbfgs/configure +21146 -0
  128. data/core/vendor/src/liblbfgs/configure.in +107 -0
  129. data/core/vendor/src/liblbfgs/depcomp +522 -0
  130. data/core/vendor/src/liblbfgs/include/lbfgs.h +745 -0
  131. data/core/vendor/src/liblbfgs/install-sh +322 -0
  132. data/core/vendor/src/liblbfgs/lbfgs.sln +26 -0
  133. data/core/vendor/src/liblbfgs/lib/Makefile.am +24 -0
  134. data/core/vendor/src/liblbfgs/lib/Makefile.in +499 -0
  135. data/core/vendor/src/liblbfgs/lib/arithmetic_ansi.h +133 -0
  136. data/core/vendor/src/liblbfgs/lib/arithmetic_sse_double.h +294 -0
  137. data/core/vendor/src/liblbfgs/lib/arithmetic_sse_float.h +298 -0
  138. data/core/vendor/src/liblbfgs/lib/lbfgs.c +1371 -0
  139. data/core/vendor/src/liblbfgs/lib/lib.vcxproj +95 -0
  140. data/core/vendor/src/liblbfgs/ltmain.sh +6426 -0
  141. data/core/vendor/src/liblbfgs/missing +353 -0
  142. data/core/vendor/src/liblbfgs/sample/Makefile.am +15 -0
  143. data/core/vendor/src/liblbfgs/sample/Makefile.in +433 -0
  144. data/core/vendor/src/liblbfgs/sample/sample.c +81 -0
  145. data/core/vendor/src/liblbfgs/sample/sample.cpp +126 -0
  146. data/core/vendor/src/liblbfgs/sample/sample.vcxproj +105 -0
  147. data/core/vendor/src/svm_light/LICENSE.txt +59 -0
  148. data/core/vendor/src/svm_light/Makefile +105 -0
  149. data/core/vendor/src/svm_light/kernel.h +40 -0
  150. data/core/vendor/src/svm_light/svm_classify.c +197 -0
  151. data/core/vendor/src/svm_light/svm_common.c +985 -0
  152. data/core/vendor/src/svm_light/svm_common.h +301 -0
  153. data/core/vendor/src/svm_light/svm_hideo.c +1062 -0
  154. data/core/vendor/src/svm_light/svm_learn.c +4147 -0
  155. data/core/vendor/src/svm_light/svm_learn.h +169 -0
  156. data/core/vendor/src/svm_light/svm_learn_main.c +397 -0
  157. data/core/vendor/src/svm_light/svm_loqo.c +211 -0
  158. data/ext/hack/Rakefile +17 -0
  159. data/ext/hack/support.rb +88 -0
  160. data/lib/opener/opinion_detector_basic.rb +91 -0
  161. data/lib/opener/opinion_detector_basic/public/markdown.css +284 -0
  162. data/lib/opener/opinion_detector_basic/server.rb +16 -0
  163. data/lib/opener/opinion_detector_basic/version.rb +5 -0
  164. data/lib/opener/opinion_detector_basic/views/index.erb +97 -0
  165. data/lib/opener/opinion_detector_basic/views/result.erb +15 -0
  166. data/opener-opinion-detector-basic.gemspec +36 -0
  167. data/pre_build_requirements.txt +1 -0
  168. metadata +309 -0
data/core/vendor/src/svm_light/svm_learn.h
@@ -0,0 +1,169 @@
+ /***********************************************************************/
+ /* */
+ /* svm_learn.h */
+ /* */
+ /* Declarations for learning module of Support Vector Machine. */
+ /* */
+ /* Author: Thorsten Joachims */
+ /* Date: 02.07.02 */
+ /* */
+ /* Copyright (c) 2002 Thorsten Joachims - All rights reserved */
+ /* */
+ /* This software is available for non-commercial use only. It must */
+ /* not be modified and distributed without prior permission of the */
+ /* author. The author is not responsible for implications from the */
+ /* use of this software. */
+ /* */
+ /***********************************************************************/
+
+ #ifndef SVM_LEARN
+ #define SVM_LEARN
+
+ void svm_learn_classification(DOC **, double *, long, long, LEARN_PARM *,
+ KERNEL_PARM *, KERNEL_CACHE *, MODEL *,
+ double *);
+ void svm_learn_regression(DOC **, double *, long, long, LEARN_PARM *,
+ KERNEL_PARM *, KERNEL_CACHE **, MODEL *);
+ void svm_learn_ranking(DOC **, double *, long, long, LEARN_PARM *,
+ KERNEL_PARM *, KERNEL_CACHE **, MODEL *);
+ void svm_learn_optimization(DOC **, double *, long, long, LEARN_PARM *,
+ KERNEL_PARM *, KERNEL_CACHE *, MODEL *,
+ double *);
+ long optimize_to_convergence(DOC **, long *, long, long, LEARN_PARM *,
+ KERNEL_PARM *, KERNEL_CACHE *, SHRINK_STATE *,
+ MODEL *, long *, long *, double *,
+ double *, double *,
+ TIMING *, double *, long, long);
+ long optimize_to_convergence_sharedslack(DOC **, long *, long, long,
+ LEARN_PARM *,
+ KERNEL_PARM *, KERNEL_CACHE *, SHRINK_STATE *,
+ MODEL *, double *, double *, double *,
+ TIMING *, double *);
+ double compute_objective_function(double *, double *, double *, double,
+ long *, long *);
+ void clear_index(long *);
+ void add_to_index(long *, long);
+ long compute_index(long *,long, long *);
+ void optimize_svm(DOC **, long *, long *, long *, double, long *, long *,
+ MODEL *,
+ long, long *, long, double *, double *, double *,
+ LEARN_PARM *, CFLOAT *, KERNEL_PARM *, QP *, double *);
+ void compute_matrices_for_optimization(DOC **, long *, long *, long *, double,
+ long *,
+ long *, long *, MODEL *, double *,
+ double *, double *, long, long, LEARN_PARM *,
+ CFLOAT *, KERNEL_PARM *, QP *);
+ long calculate_svm_model(DOC **, long *, long *, double *, double *,
+ double *, double *, LEARN_PARM *, long *,
+ long *, MODEL *);
+ long check_optimality(MODEL *, long *, long *, double *, double *,
+ double *, long,
+ LEARN_PARM *,double *, double, long *, long *, long *,
+ long *, long, KERNEL_PARM *);
+ long check_optimality_sharedslack(DOC **docs, MODEL *model, long int *label,
+ double *a, double *lin, double *c, double *slack,
+ double *alphaslack, long int totdoc,
+ LEARN_PARM *learn_parm, double *maxdiff,
+ double epsilon_crit_org, long int *misclassified,
+ long int *active2dnum,
+ long int *last_suboptimal_at,
+ long int iteration, KERNEL_PARM *kernel_parm);
+ void compute_shared_slacks(DOC **docs, long int *label, double *a,
+ double *lin, double *c, long int *active2dnum,
+ LEARN_PARM *learn_parm,
+ double *slack, double *alphaslack);
+ long identify_inconsistent(double *, long *, long *, long, LEARN_PARM *,
+ long *, long *);
+ long identify_misclassified(double *, long *, long *, long,
+ MODEL *, long *, long *);
+ long identify_one_misclassified(double *, long *, long *, long,
+ MODEL *, long *, long *);
+ long incorporate_unlabeled_examples(MODEL *, long *,long *, long *,
+ double *, double *, long, double *,
+ long *, long *, long, KERNEL_PARM *,
+ LEARN_PARM *);
+ void update_linear_component(DOC **, long *, long *, double *, double *,
+ long *, long, long, KERNEL_PARM *,
+ KERNEL_CACHE *, double *,
+ CFLOAT *, double *);
+ long select_next_qp_subproblem_grad(long *, long *, double *,
+ double *, double *, long,
+ long, LEARN_PARM *, long *, long *,
+ long *, double *, long *, KERNEL_CACHE *,
+ long, long *, long *);
+ long select_next_qp_subproblem_rand(long *, long *, double *,
+ double *, double *, long,
+ long, LEARN_PARM *, long *, long *,
+ long *, double *, long *, KERNEL_CACHE *,
+ long *, long *, long);
+ long select_next_qp_slackset(DOC **docs, long int *label, double *a,
+ double *lin, double *slack, double *alphaslack,
+ double *c, LEARN_PARM *learn_parm,
+ long int *active2dnum, double *maxviol);
+ void select_top_n(double *, long, long *, long);
+ void init_shrink_state(SHRINK_STATE *, long, long);
+ void shrink_state_cleanup(SHRINK_STATE *);
+ long shrink_problem(DOC **, LEARN_PARM *, SHRINK_STATE *, KERNEL_PARM *,
+ long *, long *, long, long, long, double *, long *);
+ void reactivate_inactive_examples(long *, long *, double *, SHRINK_STATE *,
+ double *, double*, long, long, long, LEARN_PARM *,
+ long *, DOC **, KERNEL_PARM *,
+ KERNEL_CACHE *, MODEL *, CFLOAT *,
+ double *, double *);
+
+ /* cache kernel evalutations to improve speed */
+ KERNEL_CACHE *kernel_cache_init(long, long);
+ void kernel_cache_cleanup(KERNEL_CACHE *);
+ void get_kernel_row(KERNEL_CACHE *,DOC **, long, long, long *, CFLOAT *,
+ KERNEL_PARM *);
+ void cache_kernel_row(KERNEL_CACHE *,DOC **, long, KERNEL_PARM *);
+ void cache_multiple_kernel_rows(KERNEL_CACHE *,DOC **, long *, long,
+ KERNEL_PARM *);
+ void kernel_cache_shrink(KERNEL_CACHE *,long, long, long *);
+ void kernel_cache_reset_lru(KERNEL_CACHE *);
+ long kernel_cache_malloc(KERNEL_CACHE *);
+ void kernel_cache_free(KERNEL_CACHE *,long);
+ long kernel_cache_free_lru(KERNEL_CACHE *);
+ CFLOAT *kernel_cache_clean_and_malloc(KERNEL_CACHE *,long);
+ long kernel_cache_touch(KERNEL_CACHE *,long);
+ long kernel_cache_check(KERNEL_CACHE *,long);
+ long kernel_cache_space_available(KERNEL_CACHE *);
+
+ void compute_xa_estimates(MODEL *, long *, long *, long, DOC **,
+ double *, double *, KERNEL_PARM *,
+ LEARN_PARM *, double *, double *, double *);
+ double xa_estimate_error(MODEL *, long *, long *, long, DOC **,
+ double *, double *, KERNEL_PARM *,
+ LEARN_PARM *);
+ double xa_estimate_recall(MODEL *, long *, long *, long, DOC **,
+ double *, double *, KERNEL_PARM *,
+ LEARN_PARM *);
+ double xa_estimate_precision(MODEL *, long *, long *, long, DOC **,
+ double *, double *, KERNEL_PARM *,
+ LEARN_PARM *);
+ void avg_similarity_of_sv_of_one_class(MODEL *, DOC **, double *, long *, KERNEL_PARM *, double *, double *);
+ double most_similar_sv_of_same_class(MODEL *, DOC **, double *, long, long *, KERNEL_PARM *, LEARN_PARM *);
+ double distribute_alpha_t_greedily(long *, long, DOC **, double *, long, long *, KERNEL_PARM *, LEARN_PARM *, double);
+ double distribute_alpha_t_greedily_noindex(MODEL *, DOC **, double *, long, long *, KERNEL_PARM *, LEARN_PARM *, double);
+ void estimate_transduction_quality(MODEL *, long *, long *, long, DOC **, double *);
+ double estimate_margin_vcdim(MODEL *, double, double, KERNEL_PARM *);
+ double estimate_sphere(MODEL *, KERNEL_PARM *);
+ double estimate_r_delta_average(DOC **, long, KERNEL_PARM *);
+ double estimate_r_delta(DOC **, long, KERNEL_PARM *);
+ double length_of_longest_document_vector(DOC **, long, KERNEL_PARM *);
+
+ void write_model(char *, MODEL *);
+ void write_prediction(char *, MODEL *, double *, double *, long *, long *,
+ long, LEARN_PARM *);
+ void write_alphas(char *, double *, long *, long);
+
+ typedef struct cache_parm_s {
+ KERNEL_CACHE *kernel_cache;
+ CFLOAT *cache;
+ DOC **docs;
+ long m;
+ KERNEL_PARM *kernel_parm;
+ long offset,stepsize;
+ } cache_parm_t;
+
+ #endif
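
The header above declares the core training entry points of the vendored SVM-light library. As a rough illustration of how they fit together, the sketch below condenses the call sequence that svm_learn_main.c (next diff) performs after command-line parsing into a single programmatic call for the linear-kernel classification case. This is a hedged, minimal sketch, not code from the gem: the parameter defaults are copied from read_input_parameters() further down, and read_documents(), my_malloc(), free_model(), free_example() and the global 'verbosity' are assumed to be declared in svm_common.h, as they are used in the original main(). It would need to be compiled and linked against the svm_light sources in data/core/vendor/src/svm_light.

#include <stdlib.h>
#include <string.h>
#include "svm_common.h"
#include "svm_learn.h"

/* Train a linear classifier on 'docfile' and write the model to 'modelfile'. */
int train_linear_classifier(const char *docfile, const char *modelfile)
{
  DOC **docs;                 /* training examples */
  double *target;             /* labels */
  long totwords, totdoc, i;
  LEARN_PARM learn_parm;
  KERNEL_PARM kernel_parm;
  MODEL *model = (MODEL *)my_malloc(sizeof(MODEL));

  verbosity = 1;              /* global from svm_common.h */

  /* Defaults mirrored from read_input_parameters() in svm_learn_main.c;
     fields left at zero by memset already match their defaults. */
  memset(&learn_parm, 0, sizeof(learn_parm));
  memset(&kernel_parm, 0, sizeof(kernel_parm));
  learn_parm.type = CLASSIFICATION;
  learn_parm.biased_hyperplane = 1;
  learn_parm.svm_maxqpsize = 10;
  learn_parm.svm_iter_to_shrink = 2;      /* linear-kernel default */
  learn_parm.maxiter = 100000;
  learn_parm.kernel_cache_size = 40;
  learn_parm.svm_c = 0.0;                 /* 0.0 => default [avg. x*x]^-1 */
  learn_parm.eps = 0.1;
  learn_parm.transduction_posratio = -1.0;
  learn_parm.svm_costratio = 1.0;
  learn_parm.svm_costratio_unlab = 1.0;
  learn_parm.svm_unlabbound = 1E-5;
  learn_parm.epsilon_crit = 0.001;
  learn_parm.epsilon_a = 1E-15;
  learn_parm.rho = 1.0;
  strcpy(learn_parm.predfile, "trans_predictions");
  kernel_parm.kernel_type = 0;            /* 0 = linear */
  kernel_parm.poly_degree = 3;
  kernel_parm.rbf_gamma = 1.0;
  kernel_parm.coef_lin = 1;
  kernel_parm.coef_const = 1;
  strcpy(kernel_parm.custom, "empty");

  read_documents((char *)docfile, &docs, &target, &totwords, &totdoc);

  /* Linear kernel: no kernel cache needed (see main() in the next diff). */
  svm_learn_classification(docs, target, totdoc, totwords, &learn_parm,
                           &kernel_parm, NULL, model, NULL);

  write_model((char *)modelfile, model);

  /* The model references 'docs'; free the model before the examples. */
  free_model(model, 0);
  for (i = 0; i < totdoc; i++)
    free_example(docs[i], 1);
  free(docs);
  free(target);
  return 0;
}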
data/core/vendor/src/svm_light/svm_learn_main.c
@@ -0,0 +1,397 @@
+ /***********************************************************************/
+ /* */
+ /* svm_learn_main.c */
+ /* */
+ /* Command line interface to the learning module of the */
+ /* Support Vector Machine. */
+ /* */
+ /* Author: Thorsten Joachims */
+ /* Date: 02.07.02 */
+ /* */
+ /* Copyright (c) 2000 Thorsten Joachims - All rights reserved */
+ /* */
+ /* This software is available for non-commercial use only. It must */
+ /* not be modified and distributed without prior permission of the */
+ /* author. The author is not responsible for implications from the */
+ /* use of this software. */
+ /* */
+ /***********************************************************************/
+
+
+ /* if svm-learn is used out of C++, define it as extern "C" */
+ #ifdef __cplusplus
+ extern "C" {
+ #endif
+
+ # include "svm_common.h"
+ # include "svm_learn.h"
+
+ #ifdef __cplusplus
+ }
+ #endif
+
+ char docfile[200]; /* file with training examples */
+ char modelfile[200]; /* file for resulting classifier */
+ char restartfile[200]; /* file with initial alphas */
+
+ void read_input_parameters(int, char **, char *, char *, char *, long *,
+ LEARN_PARM *, KERNEL_PARM *);
+ void wait_any_key();
+ void print_help();
+
+
+
+ int main (int argc, char* argv[])
+ {
+ DOC **docs; /* training examples */
+ long totwords,totdoc,i;
+ double *target;
+ double *alpha_in=NULL;
+ KERNEL_CACHE *kernel_cache;
+ LEARN_PARM learn_parm;
+ KERNEL_PARM kernel_parm;
+ MODEL *model=(MODEL *)my_malloc(sizeof(MODEL));
+
+ read_input_parameters(argc,argv,docfile,modelfile,restartfile,&verbosity,
+ &learn_parm,&kernel_parm);
+ read_documents(docfile,&docs,&target,&totwords,&totdoc);
+ if(restartfile[0]) alpha_in=read_alphas(restartfile,totdoc);
+
+ if(kernel_parm.kernel_type == LINEAR) { /* don't need the cache */
+ kernel_cache=NULL;
+ }
+ else {
+ /* Always get a new kernel cache. It is not possible to use the
+ same cache for two different training runs */
+ kernel_cache=kernel_cache_init(totdoc,learn_parm.kernel_cache_size);
+ }
+
+ if(learn_parm.type == CLASSIFICATION) {
+ svm_learn_classification(docs,target,totdoc,totwords,&learn_parm,
+ &kernel_parm,kernel_cache,model,alpha_in);
+ }
+ else if(learn_parm.type == REGRESSION) {
+ svm_learn_regression(docs,target,totdoc,totwords,&learn_parm,
+ &kernel_parm,&kernel_cache,model);
+ }
+ else if(learn_parm.type == RANKING) {
+ svm_learn_ranking(docs,target,totdoc,totwords,&learn_parm,
+ &kernel_parm,&kernel_cache,model);
+ }
+ else if(learn_parm.type == OPTIMIZATION) {
+ svm_learn_optimization(docs,target,totdoc,totwords,&learn_parm,
+ &kernel_parm,kernel_cache,model,alpha_in);
+ }
+
+ if(kernel_cache) {
+ /* Free the memory used for the cache. */
+ kernel_cache_cleanup(kernel_cache);
+ }
+
+ /* Warning: The model contains references to the original data 'docs'.
+ If you want to free the original data, and only keep the model, you
+ have to make a deep copy of 'model'. */
+ /* deep_copy_of_model=copy_model(model); */
+ write_model(modelfile,model);
+
+ free(alpha_in);
+ free_model(model,0);
+ for(i=0;i<totdoc;i++)
+ free_example(docs[i],1);
+ free(docs);
+ free(target);
+
+ return(0);
+ }
+
+ /*---------------------------------------------------------------------------*/
+
+ void read_input_parameters(int argc,char *argv[],char *docfile,char *modelfile,
+ char *restartfile,long *verbosity,
+ LEARN_PARM *learn_parm,KERNEL_PARM *kernel_parm)
+ {
+ long i;
+ char type[100];
+
+ /* set default */
+ strcpy (modelfile, "svm_model");
+ strcpy (learn_parm->predfile, "trans_predictions");
+ strcpy (learn_parm->alphafile, "");
+ strcpy (restartfile, "");
+ (*verbosity)=1;
+ learn_parm->biased_hyperplane=1;
+ learn_parm->sharedslack=0;
+ learn_parm->remove_inconsistent=0;
+ learn_parm->skip_final_opt_check=0;
+ learn_parm->svm_maxqpsize=10;
+ learn_parm->svm_newvarsinqp=0;
+ learn_parm->svm_iter_to_shrink=-9999;
+ learn_parm->maxiter=100000;
+ learn_parm->kernel_cache_size=40;
+ learn_parm->svm_c=0.0;
+ learn_parm->eps=0.1;
+ learn_parm->transduction_posratio=-1.0;
+ learn_parm->svm_costratio=1.0;
+ learn_parm->svm_costratio_unlab=1.0;
+ learn_parm->svm_unlabbound=1E-5;
+ learn_parm->epsilon_crit=0.001;
+ learn_parm->epsilon_a=1E-15;
+ learn_parm->compute_loo=0;
+ learn_parm->rho=1.0;
+ learn_parm->xa_depth=0;
+ kernel_parm->kernel_type=0;
+ kernel_parm->poly_degree=3;
+ kernel_parm->rbf_gamma=1.0;
+ kernel_parm->coef_lin=1;
+ kernel_parm->coef_const=1;
+ strcpy(kernel_parm->custom,"empty");
+ strcpy(type,"c");
+
+ for(i=1;(i<argc) && ((argv[i])[0] == '-');i++) {
+ switch ((argv[i])[1])
+ {
+ case '?': print_help(); exit(0);
+ case 'z': i++; strcpy(type,argv[i]); break;
+ case 'v': i++; (*verbosity)=atol(argv[i]); break;
+ case 'b': i++; learn_parm->biased_hyperplane=atol(argv[i]); break;
+ case 'i': i++; learn_parm->remove_inconsistent=atol(argv[i]); break;
+ case 'f': i++; learn_parm->skip_final_opt_check=!atol(argv[i]); break;
+ case 'q': i++; learn_parm->svm_maxqpsize=atol(argv[i]); break;
+ case 'n': i++; learn_parm->svm_newvarsinqp=atol(argv[i]); break;
+ case '#': i++; learn_parm->maxiter=atol(argv[i]); break;
+ case 'h': i++; learn_parm->svm_iter_to_shrink=atol(argv[i]); break;
+ case 'm': i++; learn_parm->kernel_cache_size=atol(argv[i]); break;
+ case 'c': i++; learn_parm->svm_c=atof(argv[i]); break;
+ case 'w': i++; learn_parm->eps=atof(argv[i]); break;
+ case 'p': i++; learn_parm->transduction_posratio=atof(argv[i]); break;
+ case 'j': i++; learn_parm->svm_costratio=atof(argv[i]); break;
+ case 'e': i++; learn_parm->epsilon_crit=atof(argv[i]); break;
+ case 'o': i++; learn_parm->rho=atof(argv[i]); break;
+ case 'k': i++; learn_parm->xa_depth=atol(argv[i]); break;
+ case 'x': i++; learn_parm->compute_loo=atol(argv[i]); break;
+ case 't': i++; kernel_parm->kernel_type=atol(argv[i]); break;
+ case 'd': i++; kernel_parm->poly_degree=atol(argv[i]); break;
+ case 'g': i++; kernel_parm->rbf_gamma=atof(argv[i]); break;
+ case 's': i++; kernel_parm->coef_lin=atof(argv[i]); break;
+ case 'r': i++; kernel_parm->coef_const=atof(argv[i]); break;
+ case 'u': i++; strcpy(kernel_parm->custom,argv[i]); break;
+ case 'l': i++; strcpy(learn_parm->predfile,argv[i]); break;
+ case 'a': i++; strcpy(learn_parm->alphafile,argv[i]); break;
+ case 'y': i++; strcpy(restartfile,argv[i]); break;
+ default: printf("\nUnrecognized option %s!\n\n",argv[i]);
+ print_help();
+ exit(0);
+ }
+ }
+ if(i>=argc) {
+ printf("\nNot enough input parameters!\n\n");
+ wait_any_key();
+ print_help();
+ exit(0);
+ }
+ strcpy (docfile, argv[i]);
+ if((i+1)<argc) {
+ strcpy (modelfile, argv[i+1]);
+ }
+ if(learn_parm->svm_iter_to_shrink == -9999) {
+ if(kernel_parm->kernel_type == LINEAR)
+ learn_parm->svm_iter_to_shrink=2;
+ else
+ learn_parm->svm_iter_to_shrink=100;
+ }
+ if(strcmp(type,"c")==0) {
+ learn_parm->type=CLASSIFICATION;
+ }
+ else if(strcmp(type,"r")==0) {
+ learn_parm->type=REGRESSION;
+ }
+ else if(strcmp(type,"p")==0) {
+ learn_parm->type=RANKING;
+ }
+ else if(strcmp(type,"o")==0) {
+ learn_parm->type=OPTIMIZATION;
+ }
+ else if(strcmp(type,"s")==0) {
+ learn_parm->type=OPTIMIZATION;
+ learn_parm->sharedslack=1;
+ }
+ else {
+ printf("\nUnknown type '%s': Valid types are 'c' (classification), 'r' regession, and 'p' preference ranking.\n",type);
+ wait_any_key();
+ print_help();
+ exit(0);
+ }
+ if((learn_parm->skip_final_opt_check)
+ && (kernel_parm->kernel_type == LINEAR)) {
+ printf("\nIt does not make sense to skip the final optimality check for linear kernels.\n\n");
+ learn_parm->skip_final_opt_check=0;
+ }
+ if((learn_parm->skip_final_opt_check)
+ && (learn_parm->remove_inconsistent)) {
+ printf("\nIt is necessary to do the final optimality check when removing inconsistent \nexamples.\n");
+ wait_any_key();
+ print_help();
+ exit(0);
+ }
+ if((learn_parm->svm_maxqpsize<2)) {
+ printf("\nMaximum size of QP-subproblems not in valid range: %ld [2..]\n",learn_parm->svm_maxqpsize);
+ wait_any_key();
+ print_help();
+ exit(0);
+ }
+ if((learn_parm->svm_maxqpsize<learn_parm->svm_newvarsinqp)) {
+ printf("\nMaximum size of QP-subproblems [%ld] must be larger than the number of\n",learn_parm->svm_maxqpsize);
+ printf("new variables [%ld] entering the working set in each iteration.\n",learn_parm->svm_newvarsinqp);
+ wait_any_key();
+ print_help();
+ exit(0);
+ }
+ if(learn_parm->svm_iter_to_shrink<1) {
+ printf("\nMaximum number of iterations for shrinking not in valid range: %ld [1,..]\n",learn_parm->svm_iter_to_shrink);
+ wait_any_key();
+ print_help();
+ exit(0);
+ }
+ if(learn_parm->svm_c<0) {
+ printf("\nThe C parameter must be greater than zero!\n\n");
+ wait_any_key();
+ print_help();
+ exit(0);
+ }
+ if(learn_parm->transduction_posratio>1) {
+ printf("\nThe fraction of unlabeled examples to classify as positives must\n");
+ printf("be less than 1.0 !!!\n\n");
+ wait_any_key();
+ print_help();
+ exit(0);
+ }
+ if(learn_parm->svm_costratio<=0) {
+ printf("\nThe COSTRATIO parameter must be greater than zero!\n\n");
+ wait_any_key();
+ print_help();
+ exit(0);
+ }
+ if(learn_parm->epsilon_crit<=0) {
+ printf("\nThe epsilon parameter must be greater than zero!\n\n");
+ wait_any_key();
+ print_help();
+ exit(0);
+ }
+ if(learn_parm->rho<0) {
+ printf("\nThe parameter rho for xi/alpha-estimates and leave-one-out pruning must\n");
+ printf("be greater than zero (typically 1.0 or 2.0, see T. Joachims, Estimating the\n");
+ printf("Generalization Performance of an SVM Efficiently, ICML, 2000.)!\n\n");
+ wait_any_key();
+ print_help();
+ exit(0);
+ }
+ if((learn_parm->xa_depth<0) || (learn_parm->xa_depth>100)) {
+ printf("\nThe parameter depth for ext. xi/alpha-estimates must be in [0..100] (zero\n");
+ printf("for switching to the conventional xa/estimates described in T. Joachims,\n");
+ printf("Estimating the Generalization Performance of an SVM Efficiently, ICML, 2000.)\n");
+ wait_any_key();
+ print_help();
+ exit(0);
+ }
+ }
+
+ void wait_any_key()
+ {
+ printf("\n(more)\n");
+ (void)getc(stdin);
+ }
+
+ void print_help()
+ {
+ printf("\nSVM-light %s: Support Vector Machine, learning module %s\n",VERSION,VERSION_DATE);
+ copyright_notice();
+ printf(" usage: svm_learn [options] example_file model_file\n\n");
+ printf("Arguments:\n");
+ printf(" example_file-> file with training data\n");
+ printf(" model_file -> file to store learned decision rule in\n");
+
+ printf("General options:\n");
+ printf(" -? -> this help\n");
+ printf(" -v [0..3] -> verbosity level (default 1)\n");
+ printf("Learning options:\n");
+ printf(" -z {c,r,p} -> select between classification (c), regression (r),\n");
+ printf(" and preference ranking (p) (default classification)\n");
+ printf(" -c float -> C: trade-off between training error\n");
+ printf(" and margin (default [avg. x*x]^-1)\n");
+ printf(" -w [0..] -> epsilon width of tube for regression\n");
+ printf(" (default 0.1)\n");
+ printf(" -j float -> Cost: cost-factor, by which training errors on\n");
+ printf(" positive examples outweight errors on negative\n");
+ printf(" examples (default 1) (see [4])\n");
+ printf(" -b [0,1] -> use biased hyperplane (i.e. x*w+b>0) instead\n");
+ printf(" of unbiased hyperplane (i.e. x*w>0) (default 1)\n");
+ printf(" -i [0,1] -> remove inconsistent training examples\n");
+ printf(" and retrain (default 0)\n");
+ printf("Performance estimation options:\n");
+ printf(" -x [0,1] -> compute leave-one-out estimates (default 0)\n");
+ printf(" (see [5])\n");
+ printf(" -o ]0..2] -> value of rho for XiAlpha-estimator and for pruning\n");
+ printf(" leave-one-out computation (default 1.0) (see [2])\n");
+ printf(" -k [0..100] -> search depth for extended XiAlpha-estimator \n");
+ printf(" (default 0)\n");
+ printf("Transduction options (see [3]):\n");
+ printf(" -p [0..1] -> fraction of unlabeled examples to be classified\n");
+ printf(" into the positive class (default is the ratio of\n");
+ printf(" positive and negative examples in the training data)\n");
+ printf("Kernel options:\n");
+ printf(" -t int -> type of kernel function:\n");
+ printf(" 0: linear (default)\n");
+ printf(" 1: polynomial (s a*b+c)^d\n");
+ printf(" 2: radial basis function exp(-gamma ||a-b||^2)\n");
+ printf(" 3: sigmoid tanh(s a*b + c)\n");
+ printf(" 4: user defined kernel from kernel.h\n");
+ printf(" -d int -> parameter d in polynomial kernel\n");
+ printf(" -g float -> parameter gamma in rbf kernel\n");
+ printf(" -s float -> parameter s in sigmoid/poly kernel\n");
+ printf(" -r float -> parameter c in sigmoid/poly kernel\n");
+ printf(" -u string -> parameter of user defined kernel\n");
+ printf("Optimization options (see [1]):\n");
+ printf(" -q [2..] -> maximum size of QP-subproblems (default 10)\n");
+ printf(" -n [2..q] -> number of new variables entering the working set\n");
+ printf(" in each iteration (default n = q). Set n<q to prevent\n");
+ printf(" zig-zagging.\n");
+ printf(" -m [5..] -> size of cache for kernel evaluations in MB (default 40)\n");
+ printf(" The larger the faster...\n");
+ printf(" -e float -> eps: Allow that error for termination criterion\n");
+ printf(" [y [w*x+b] - 1] >= eps (default 0.001)\n");
+ printf(" -y [0,1] -> restart the optimization from alpha values in file\n");
+ printf(" specified by -a option. (default 0)\n");
+ printf(" -h [5..] -> number of iterations a variable needs to be\n");
+ printf(" optimal before considered for shrinking (default 100)\n");
+ printf(" -f [0,1] -> do final optimality check for variables removed\n");
+ printf(" by shrinking. Although this test is usually \n");
+ printf(" positive, there is no guarantee that the optimum\n");
+ printf(" was found if the test is omitted. (default 1)\n");
+ printf(" -y string -> if option is given, reads alphas from file with given\n");
+ printf(" and uses them as starting point. (default 'disabled')\n");
+ printf(" -# int -> terminate optimization, if no progress after this\n");
+ printf(" number of iterations. (default 100000)\n");
+ printf("Output options:\n");
+ printf(" -l string -> file to write predicted labels of unlabeled\n");
+ printf(" examples into after transductive learning\n");
+ printf(" -a string -> write all alphas to this file after learning\n");
+ printf(" (in the same order as in the training set)\n");
+ wait_any_key();
+ printf("\nMore details in:\n");
+ printf("[1] T. Joachims, Making Large-Scale SVM Learning Practical. Advances in\n");
+ printf(" Kernel Methods - Support Vector Learning, B. Schölkopf and C. Burges and\n");
+ printf(" A. Smola (ed.), MIT Press, 1999.\n");
+ printf("[2] T. Joachims, Estimating the Generalization performance of an SVM\n");
+ printf(" Efficiently. International Conference on Machine Learning (ICML), 2000.\n");
+ printf("[3] T. Joachims, Transductive Inference for Text Classification using Support\n");
+ printf(" Vector Machines. International Conference on Machine Learning (ICML),\n");
+ printf(" 1999.\n");
+ printf("[4] K. Morik, P. Brockhausen, and T. Joachims, Combining statistical learning\n");
+ printf(" with a knowledge-based approach - A case study in intensive care \n");
+ printf(" monitoring. International Conference on Machine Learning (ICML), 1999.\n");
+ printf("[5] T. Joachims, Learning to Classify Text Using Support Vector\n");
+ printf(" Machines: Methods, Theory, and Algorithms. Dissertation, Kluwer,\n");
+ printf(" 2002.\n\n");
+ }
+
+