otbTrainLibSVM.hxx
/*
 * Copyright (C) 2005-2019 Centre National d'Etudes Spatiales (CNES)
 *
 * This file is part of Orfeo Toolbox
 *
 * https://www.orfeo-toolbox.org/
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef otbTrainLibSVM_hxx
#define otbTrainLibSVM_hxx
#include "otbLearningApplicationBase.h"
#include "otbLibSVMMachineLearningModel.h"

namespace otb
{
namespace Wrapper
{

template <class TInputValue, class TOutputValue>
void
LearningApplicationBase<TInputValue, TOutputValue>
::InitLibSVMParams()
{
  AddChoice("classifier.libsvm", "LibSVM classifier");
  SetParameterDescription("classifier.libsvm", "This group of parameters allows setting SVM classifier parameters.");
  AddParameter(ParameterType_Choice, "classifier.libsvm.k", "SVM Kernel Type");
  AddChoice("classifier.libsvm.k.linear", "Linear");
  SetParameterDescription("classifier.libsvm.k.linear",
                          "Linear kernel: no mapping is done, this is the fastest option.");

  AddChoice("classifier.libsvm.k.rbf", "Gaussian radial basis function");
  SetParameterDescription("classifier.libsvm.k.rbf",
                          "This kernel is a good choice in most cases. It is "
                          "an exponential function of the Euclidean distance between "
                          "the vectors.");

  AddChoice("classifier.libsvm.k.poly", "Polynomial");
  SetParameterDescription("classifier.libsvm.k.poly",
                          "Polynomial kernel: the mapping is a polynomial function.");

  AddChoice("classifier.libsvm.k.sigmoid", "Sigmoid");
  SetParameterDescription("classifier.libsvm.k.sigmoid",
                          "This kernel is a hyperbolic tangent function of the vectors.");

  SetParameterString("classifier.libsvm.k", "linear");
  SetParameterDescription("classifier.libsvm.k", "SVM Kernel Type.");
  AddParameter(ParameterType_Choice, "classifier.libsvm.m", "SVM Model Type");
  SetParameterDescription("classifier.libsvm.m", "Type of SVM formulation.");
  if (this->m_RegressionFlag)
  {
    AddChoice("classifier.libsvm.m.epssvr", "Epsilon Support Vector Regression");
    SetParameterDescription("classifier.libsvm.m.epssvr",
                            "The distance between feature vectors from the training set and the "
                            "fitting hyper-plane must be less than Epsilon. For outliers the penalty "
                            "multiplier C is used.");

    AddChoice("classifier.libsvm.m.nusvr", "Nu Support Vector Regression");
    SetParameterString("classifier.libsvm.m", "epssvr");
    SetParameterDescription("classifier.libsvm.m.nusvr",
                            "Same as the epsilon regression, except that the bounded "
                            "parameter nu is used instead of epsilon.");
  }
  else
  {
    AddChoice("classifier.libsvm.m.csvc", "C support vector classification");
    SetParameterDescription("classifier.libsvm.m.csvc",
                            "This formulation allows imperfect separation of classes. The penalty "
                            "is set through the cost parameter C.");

    AddChoice("classifier.libsvm.m.nusvc", "Nu support vector classification");
    SetParameterDescription("classifier.libsvm.m.nusvc",
                            "This formulation allows imperfect separation of classes. The penalty "
                            "is set through the cost parameter Nu. As compared to C, Nu is harder "
                            "to optimize, and may not be as fast.");

    AddChoice("classifier.libsvm.m.oneclass", "Distribution estimation (One Class SVM)");
    SetParameterDescription("classifier.libsvm.m.oneclass",
                            "All the training data are from the same class; SVM builds a boundary "
                            "that separates the class from the rest of the feature space.");
    SetParameterString("classifier.libsvm.m", "csvc");
  }

  AddParameter(ParameterType_Float, "classifier.libsvm.c", "Cost parameter C");
  SetParameterFloat("classifier.libsvm.c", 1.0);
  SetParameterDescription("classifier.libsvm.c",
                          "SVM models have a cost parameter C (1 by default) to control the "
                          "trade-off between training errors and forcing rigid margins.");

  AddParameter(ParameterType_Float, "classifier.libsvm.nu", "Cost parameter Nu");
  SetParameterFloat("classifier.libsvm.nu", 0.5);
  SetParameterDescription("classifier.libsvm.nu",
                          "Cost parameter Nu, in the range 0..1, the larger the value, "
                          "the smoother the decision.");

  // Note: it seems that a separate nu parameter is missing for the nu-SVM case.
  AddParameter(ParameterType_Bool, "classifier.libsvm.opt", "Parameters optimization");
  SetParameterDescription("classifier.libsvm.opt", "SVM parameters optimization flag.");

  AddParameter(ParameterType_Bool, "classifier.libsvm.prob", "Probability estimation");
  SetParameterDescription("classifier.libsvm.prob", "Probability estimation flag.");

  if (this->m_RegressionFlag)
  {
    AddParameter(ParameterType_Float, "classifier.libsvm.eps", "Epsilon");
    SetParameterFloat("classifier.libsvm.eps", 1e-3);
    SetParameterDescription("classifier.libsvm.eps",
                            "The distance between feature vectors from the training set and "
                            "the fitting hyper-plane must be less than Epsilon. For outliers "
                            "the penalty multiplier is set by C.");
  }
}
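
// The switch statements in TrainLibSVM() below read these Choice parameters
// back with GetParameterInt(), which yields the zero-based index of the
// selected choice in the order the AddChoice() calls above register them
// (e.g. linear = 0, rbf = 1, poly = 2, sigmoid = 3 for "classifier.libsvm.k").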

template <class TInputValue, class TOutputValue>
void
LearningApplicationBase<TInputValue, TOutputValue>
::TrainLibSVM(typename ListSampleType::Pointer trainingListSample,
              typename TargetListSampleType::Pointer trainingLabeledListSample,
              std::string modelPath)
{
  typedef otb::LibSVMMachineLearningModel<TInputValue, TOutputValue> LibSVMType;
  typename LibSVMType::Pointer libSVMClassifier = LibSVMType::New();
  libSVMClassifier->SetRegressionMode(this->m_RegressionFlag);
  libSVMClassifier->SetInputListSample(trainingListSample);
  libSVMClassifier->SetTargetListSample(trainingLabeledListSample);
  // SVM options
  // TODO: add other options?
  libSVMClassifier->SetParameterOptimization(GetParameterInt("classifier.libsvm.opt"));
  libSVMClassifier->SetDoProbabilityEstimates(GetParameterInt("classifier.libsvm.prob"));
  libSVMClassifier->SetNu(GetParameterFloat("classifier.libsvm.nu"));
  libSVMClassifier->SetC(GetParameterFloat("classifier.libsvm.c"));

  switch (GetParameterInt("classifier.libsvm.k"))
  {
    case 0: // LINEAR
      libSVMClassifier->SetKernelType(LINEAR);
      break;
    case 1: // RBF
      libSVMClassifier->SetKernelType(RBF);
      break;
    case 2: // POLY
      libSVMClassifier->SetKernelType(POLY);
      break;
    case 3: // SIGMOID
      libSVMClassifier->SetKernelType(SIGMOID);
      break;
    default: // DEFAULT = LINEAR
      libSVMClassifier->SetKernelType(LINEAR);
      break;
  }
  if (this->m_RegressionFlag)
  {
    switch (GetParameterInt("classifier.libsvm.m"))
    {
      case 0: // EPSILON_SVR
        libSVMClassifier->SetSVMType(EPSILON_SVR);
        break;
      case 1: // NU_SVR
        libSVMClassifier->SetSVMType(NU_SVR);
        break;
      default:
        libSVMClassifier->SetSVMType(EPSILON_SVR);
        break;
    }
    libSVMClassifier->SetEpsilon(GetParameterFloat("classifier.libsvm.eps"));
  }
  else
  {
    switch (GetParameterInt("classifier.libsvm.m"))
    {
      case 0: // C_SVC
        libSVMClassifier->SetSVMType(C_SVC);
        break;
      case 1: // NU_SVC
        libSVMClassifier->SetSVMType(NU_SVC);
        break;
      case 2: // ONE_CLASS
        libSVMClassifier->SetSVMType(ONE_CLASS);
        break;
      default:
        libSVMClassifier->SetSVMType(C_SVC);
        break;
    }
  }

  libSVMClassifier->Train();
  libSVMClassifier->Save(modelPath);
}

} // end namespace Wrapper
} // end namespace otb

#endif
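
Usage note: the TrainLibSVM() body above boils down to configuring an
otb::LibSVMMachineLearningModel and calling Train()/Save(). The fragment below
is a minimal, hypothetical sketch of driving that model directly, outside of
LearningApplicationBase. The sample-type typedefs (InputListSampleType,
InputSampleType, TargetListSampleType, TargetSampleType), the header name and
the visibility of the libsvm enums are assumptions to check against your OTB
version, and real training data would replace the two toy samples.

#include "otbLibSVMMachineLearningModel.h"

int main()
{
  // Model templated on the feature value type and the label type
  // (assumed to match the LearningApplicationBase instantiation above).
  typedef otb::LibSVMMachineLearningModel<float, unsigned int> ModelType;

  // Assumed typedefs exposed by the model for its training containers.
  ModelType::InputListSampleType::Pointer  samples = ModelType::InputListSampleType::New();
  ModelType::TargetListSampleType::Pointer labels  = ModelType::TargetListSampleType::New();
  samples->SetMeasurementVectorSize(2);

  // Two toy feature vectors with two different class labels.
  ModelType::InputSampleType s1(2), s2(2);
  s1[0] = 0.0f; s1[1] = 0.0f;
  s2[0] = 1.0f; s2[1] = 1.0f;
  ModelType::TargetSampleType l1, l2;
  l1[0] = 0; l2[0] = 1;
  samples->PushBack(s1); labels->PushBack(l1);
  samples->PushBack(s2); labels->PushBack(l2);

  // Same calls as in TrainLibSVM() above.
  ModelType::Pointer model = ModelType::New();
  model->SetInputListSample(samples);
  model->SetTargetListSample(labels);
  model->SetKernelType(RBF);     // libsvm kernel enum, as used above
  model->SetSVMType(C_SVC);      // libsvm formulation enum, as used above
  model->SetC(1.0);
  model->SetDoProbabilityEstimates(true);

  model->Train();
  model->Save("svm_model.txt");  // hypothetical output path
  return 0;
}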