// $Id: SVM.h 295 2005-04-29 09:15:58Z peter $

#ifndef _theplu_svm_svm_
#define _theplu_svm_svm_

#include <c++_tools/svm/Kernel.h>
#include <c++_tools/gslapi/vector.h>

#include <utility>
#include <vector>


namespace theplu {
namespace svm {
  ///
  /// Class for SVM using Keerthi's second modification of Platt's SMO.
  /// The kernel elements are not computed sequentially; instead the
  /// complete kernel matrix is taken as input and kept in memory. This
  /// makes training faster, but it also means that a large number of
  /// samples N cannot be trained, since the memory cost for the kernel
  /// matrix is N^2. The SVM object does not contain any data, hence true
  /// prediction is not possible.
  ///
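  /// Example usage (a minimal sketch; how the Kernel and the target
  /// vector are constructed is outside this class and assumed here):
  /// \code
  /// // kernel: precomputed N x N kernel matrix (theplu::svm::Kernel)
  /// // target: N targets (theplu::gslapi::vector)
  /// theplu::svm::SVM svm(kernel, target);
  /// svm.set_c(1.0);   // soft-margin C-parameter
  /// svm.train();      // SMO training with the kernel kept in memory
  /// theplu::gslapi::vector alpha = svm.get_alpha();
  /// theplu::gslapi::vector out = svm.output(); // K*(alpha.*target)+bias
  /// \endcode
  ///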
  class SVM
  {

  public:
    ///
    /// Constructor taking the kernel matrix, the target vector, and
    /// optionally a vector of indices defining the training set as input.
    ///
    SVM(const Kernel&,
        const gslapi::vector&,
        const std::vector<size_t>& = std::vector<size_t>());

    ///
    /// Function returns \f$\alpha\f$
    ///
    inline gslapi::vector get_alpha(void) const { return alpha_; }

    ///
    /// Function returns the C-parameter
    ///
    inline double get_c(void) const { return c_; }

    ///
    /// @return maximal number of epochs
    ///
    inline unsigned long int max_epochs(void) const {return max_epochs_;}

    ///
    /// Set the maximal number of epochs.
    ///
    inline void max_epochs(const unsigned long int d) {max_epochs_=d;}

    ///
    /// @return the output, i.e. \f$ K(\alpha \circ y) + b \f$, where
    /// \f$\circ\f$ denotes element-wise multiplication
    ///
    inline theplu::gslapi::vector output(void)
    { return kernel_.get() * alpha_.mul(target_) +
        theplu::gslapi::vector(alpha_.size(),bias_); }

    ///
    /// Set the C-parameter.
    ///
    inline void set_c(const double c) {c_ = c;}

    ///
    /// Trains the SVM following Platt's SMO with Keerthi's
    /// modification. However, the complete kernel is stored in memory
    /// for speed. When the number of samples N is large this is not
    /// possible, since the memory cost for the kernel scales as N^2;
    /// in that case one should follow the original SMO and calculate
    /// the kernel elements sequentially. Minimizes
    /// \f$ \frac{1}{2}\sum_{i,j} y_i y_j \alpha_i \alpha_j
    /// (K_{ij}+\frac{1}{C_i}\delta_{ij}) \f$, which corresponds to
    /// minimizing \f$ \sum w_i^2 + \sum C_i \xi_i^2 \f$.
    ///
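    /// For orientation (a sketch of the standard 2-norm soft-margin
    /// correspondence, stated here as background rather than taken from
    /// this implementation): including the linear term, the dual problem
    /// reads
    /// \f[ \max_{\alpha}\;\sum_i \alpha_i - \frac{1}{2}\sum_{i,j}
    ///     y_i y_j \alpha_i \alpha_j
    ///     \left(K_{ij}+\frac{\delta_{ij}}{C_i}\right),
    ///     \quad \alpha_i \ge 0, \f]
    /// where the extra diagonal term \f$ \delta_{ij}/C_i \f$ is what
    /// replaces the usual linear slack penalty by a quadratic one in
    /// the primal.
    ///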
    bool train(void);


  private:
    gslapi::vector alpha_;
    double bias_;
    double c_;
    Kernel kernel_; // Peter, const ref?
    unsigned long int max_epochs_;
    gslapi::vector target_; // Peter, const ref?
    bool trained_;
    std::vector<size_t> train_set_;
    double tolerance_;

    ///
    /// Private function choosing which two elements should be updated.
    /// First, the largest violation of (output - target = 0) is looked
    /// for among the support vectors (alpha!=0). If no violation is
    /// found there, the other samples are checked sequentially. If no
    /// violation is found among them either, the stop condition is
    /// fulfilled.
    ///
    std::pair<size_t, size_t> choose(const theplu::gslapi::vector&,
                                     const theplu::gslapi::vector&,
                                     const theplu::gslapi::vector&,
                                     bool&);

  };

}} // of namespace svm and namespace theplu

#endif