/*=========================================================================
 *
 * Copyright Insight Software Consortium
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0.txt
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 *=========================================================================*/
#ifndef __itkSPSAOptimizer_cxx
#define __itkSPSAOptimizer_cxx

#include "itkSPSAOptimizer.h"

#include "itkMath.h"

namespace itk
{
SPSAOptimizer
::SPSAOptimizer()
{
  itkDebugMacro("Constructor");

  m_CurrentIteration = 0;
  m_Maximize = false;
  m_StopCondition = Unknown;
  m_StateOfConvergenceDecayRate = 0.9;
  m_Tolerance = 1e-06;
  m_StateOfConvergence = 0;
  m_MaximumNumberOfIterations = 100;
  m_MinimumNumberOfIterations = 10;
  m_GradientMagnitude = 0.0;
  m_NumberOfPerturbations = 1;
  m_LearningRate = 0.0;
  m_Sa = 1.0;
  m_Sc = 1.0;
  m_A = m_MaximumNumberOfIterations / 10;
  m_Alpha = 0.602;
  m_Gamma = 0.101;
} // end Constructor
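
/* Note on the defaults set above: Alpha = 0.602 and Gamma = 0.101 are the
 * decay exponents commonly recommended for SPSA in practice, and A defaults
 * to roughly 10% of the maximum number of iterations. The scaling factors
 * Sa and Sc usually need problem-specific tuning, e.g. via GuessParameters().
 */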

void
SPSAOptimizer
::PrintSelf(std::ostream & os, Indent indent) const
{
  Superclass::PrintSelf(os, indent);

  os << indent << "a: " << m_Sa << std::endl;
  os << indent << "A: " << m_A << std::endl;
  os << indent << "Alpha: " << m_Alpha << std::endl;
  os << indent << "c: " << m_Sc << std::endl;
  os << indent << "Gamma: " << m_Gamma << std::endl;
  os << indent << "Tolerance: " << m_Tolerance << std::endl;
  os << indent << "GradientMagnitude: " << m_GradientMagnitude << std::endl;
  os << indent << "StateOfConvergenceDecayRate: " << m_StateOfConvergenceDecayRate << std::endl;
  os << indent << "Gradient: " << m_Gradient << std::endl;
  os << indent << "StateOfConvergence: " << m_StateOfConvergence << std::endl;

  os << indent << "NumberOfPerturbations: " << m_NumberOfPerturbations << std::endl;

  os << indent << "LearningRate: "
     << m_LearningRate << std::endl;

  os << indent << "MaximumNumberOfIterations: "
     << m_MaximumNumberOfIterations << std::endl;
  os << indent << "MinimumNumberOfIterations: "
     << m_MinimumNumberOfIterations << std::endl;

  os << indent << "Maximize: "
     << m_Maximize << std::endl;

  os << indent << "CurrentIteration: "
     << m_CurrentIteration;
  if ( m_CostFunction )
    {
    os << indent << "CostFunction: "
       << m_CostFunction;
    }
  os << indent << "StopCondition: "
     << m_StopCondition;
  os << std::endl;
} // end PrintSelf

SPSAOptimizer::MeasureType
SPSAOptimizer
::GetValue(const ParametersType & parameters) const
{
  // Forward to the Superclass implementation; redefined here so that it is
  // not hidden by the GetValue(void) overload below.
  return this->Superclass::GetValue(parameters);
}

SPSAOptimizer::MeasureType
SPSAOptimizer
::GetValue(void) const
{
  // Evaluate the cost function at the current position.
  return this->GetValue( this->GetCurrentPosition() );
}

void
SPSAOptimizer
::StartOptimization(void)
{
  itkDebugMacro("StartOptimization");

  if ( !m_CostFunction )
    {
    itkExceptionMacro(<< "No objective function defined! ");
    }

  // The number of parameters reported by the cost function must match the
  // size of the initial position.
  const unsigned int spaceDimension =
    m_CostFunction->GetNumberOfParameters();
  if ( spaceDimension != this->GetInitialPosition().GetSize() )
    {
    itkExceptionMacro(<< "Number of parameters not correct!");
    }

  m_CurrentIteration = 0;
  m_StopCondition = Unknown;
  m_StateOfConvergence = 0.0;

  this->SetCurrentPosition( this->GetInitialPosition() );
  this->ResumeOptimization();
} // end StartOptimization

void
SPSAOptimizer
::ResumeOptimization(void)
{
  itkDebugMacro("ResumeOptimization");

  m_Stop = false;

  InvokeEvent( StartEvent() );
  while ( !m_Stop )
    {
    AdvanceOneStep();
    this->InvokeEvent( IterationEvent() );

    if ( m_Stop )
      {
      break;
      }

    m_CurrentIteration++;

    if ( m_CurrentIteration >= m_MaximumNumberOfIterations )
      {
      m_StopCondition = MaximumNumberOfIterations;
      StopOptimization();
      break;
      }

    // Check for convergence: the state of convergence must have dropped
    // below the tolerance, after at least the minimum number of iterations.
    if ( ( m_StateOfConvergence < m_Tolerance )
         && ( m_CurrentIteration >= m_MinimumNumberOfIterations ) )
      {
      m_StopCondition = BelowTolerance;
      StopOptimization();
      break;
      }
    m_StateOfConvergence *= m_StateOfConvergenceDecayRate;
    } // while !m_stop
} // end ResumeOptimization
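
/* Convergence bookkeeping: AdvanceOneStep() adds a_k * ||gradient estimate||
 * to m_StateOfConvergence, which is multiplied by the decay rate at the end
 * of every iteration. The loop therefore stops with BelowTolerance once this
 * exponentially decayed sum of recent step sizes falls below m_Tolerance,
 * provided the minimum number of iterations has been performed.
 */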

void
SPSAOptimizer
::StopOptimization(void)
{
  itkDebugMacro("StopOptimization");
  m_Stop = true;
  InvokeEvent( EndEvent() );
} // end StopOptimization

void
SPSAOptimizer
::AdvanceOneStep(void)
{
  itkDebugMacro("AdvanceOneStep");

  // Move along the gradient estimate when maximizing, against it when
  // minimizing.
  double direction;
  if ( this->m_Maximize )
    {
    direction = 1.0;
    }
  else
    {
    direction = -1.0;
    }

  // Number of parameters being optimized.
  const unsigned int spaceDimension =
    m_CostFunction->GetNumberOfParameters();

  // Allocate the new position and fetch the current one.
  ParametersType newPosition(spaceDimension);
  const ParametersType & currentPosition = this->GetCurrentPosition();

  // Estimate the gradient at the current position (averaged over
  // m_NumberOfPerturbations simultaneous-perturbation estimates).
  try
    {
    this->ComputeGradient(currentPosition, m_Gradient);
    }
  catch ( ExceptionObject & err )
    {
    // An exception has occurred.
    // Terminate immediately.
    m_StopCondition = MetricError;
    StopOptimization();
    // Pass exception to caller
    throw err;
    }

  // Compute the gain a_k and store it as the current learning rate.
  const double ak = this->Compute_a(m_CurrentIteration);
  m_LearningRate = ak;

  // Take the step.
  newPosition = currentPosition + ( direction * ak ) * m_Gradient;
  this->SetCurrentPosition(newPosition);

  // Record the gradient magnitude and update the state of convergence.
  m_GradientMagnitude = m_Gradient.magnitude();

  m_StateOfConvergence += ak * m_GradientMagnitude;
} // end AdvanceOneStep
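
/* The update performed above is the standard SPSA iteration
 *   theta_{k+1} = theta_k +/- a_k * g_k(theta_k),
 * where g_k is the simultaneous-perturbation gradient estimate returned by
 * ComputeGradient() and a_k is the decaying gain from Compute_a(). The sign
 * is chosen by m_Maximize.
 */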

double SPSAOptimizer
::Compute_a(SizeValueType k) const
{
  return static_cast< double >(
    m_Sa / vcl_pow(m_A + k + 1, m_Alpha) );
} // end Compute_a
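
/* Gain sequence: a_k = Sa / (A + k + 1)^Alpha. With the default
 * Alpha = 0.602 it decays slowly, so early iterations take larger steps.
 */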

double SPSAOptimizer
::Compute_c(SizeValueType k) const
{
  return static_cast< double >(
    m_Sc / vcl_pow(k + 1, m_Gamma) );
} // end Compute_c
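
/* Perturbation sequence: c_k = Sc / (k + 1)^Gamma, the magnitude of the
 * finite-difference perturbation used by ComputeGradient() at iteration k.
 */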

void SPSAOptimizer
::GenerateDelta(const unsigned int spaceDimension)
{
  m_Delta = DerivativeType(spaceDimension);

  const ScalesType & scales = this->GetScales();
  // Make sure the scales have been set properly
  if ( scales.size() != spaceDimension )
    {
    itkExceptionMacro(<< "The size of Scales is "
                      << scales.size()
                      << ", but the NumberOfParameters for the CostFunction is "
                      << spaceDimension
                      << ".");
    }

  const ScalesType & invScales = this->GetInverseScales();
  for ( unsigned int j = 0; j < spaceDimension; ++j )
    {
    // Draw -1 or +1 with equal probability ...
    m_Delta[j] = 2 * Math::Round< int >( this->m_Generator->GetUniformVariate(0.0f, 1.0f) ) - 1;

    // ... and apply the inverse parameter scale.
    m_Delta[j] *= invScales[j];
    }
} // end GenerateDelta
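
/* SPSA requires the perturbation components to come from a distribution with
 * finite inverse moments; the symmetric Bernoulli +/-1 distribution used above
 * is the usual choice (a uniform or Gaussian perturbation would not be valid,
 * since 1/Delta_j appears in the gradient estimate). Dividing by the scales
 * makes the perturbation act in the scaled parameter space.
 */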

void
SPSAOptimizer::ComputeGradient(
  const ParametersType & parameters,
  DerivativeType & gradient)
{
  const unsigned int spaceDimension = parameters.GetSize();

  // Compute the perturbation size c_k for this iteration.
  const double ck = this->Compute_c(m_CurrentIteration);

  // Allocate the two perturbed parameter vectors.
  ParametersType thetaplus(spaceDimension);
  ParametersType thetamin(spaceDimension);

  gradient = DerivativeType(spaceDimension);
  gradient.Fill(0.0);
  const ScalesType & scales = this->GetScales();

  // Average over several gradient estimates if requested.
  for ( SizeValueType perturbation = 1;
        perturbation <= this->GetNumberOfPerturbations();
        ++perturbation )
    {
    // Generate a new perturbation vector Delta.
    this->GenerateDelta(spaceDimension);

    // Create the two perturbed positions theta +/- c_k * Delta.
    for ( unsigned int j = 0; j < spaceDimension; j++ )
      {
      thetaplus[j] = parameters[j] + ck * m_Delta[j];
      thetamin[j] = parameters[j] - ck * m_Delta[j];
      }

    // Evaluate the cost function at both perturbed positions.
    const double valueplus = this->GetValue(thetaplus);
    const double valuemin = this->GetValue(thetamin);

    // Central-difference term, shared by all components.
    const double valuediff = ( valueplus - valuemin ) / ( 2 * ck );
    for ( unsigned int j = 0; j < spaceDimension; j++ )
      {
      // remember to divide the gradient by the NumberOfPerturbations!
      gradient[j] += valuediff / m_Delta[j];
      }
    } //end for ++perturbation

  // Apply the scales (squared) and divide by the number of perturbations.
  for ( unsigned int j = 0; j < spaceDimension; j++ )
    {
    gradient[j] /= ( vnl_math_sqr(scales[j]) * static_cast< double >( m_NumberOfPerturbations ) );
    }
} //end ComputeGradient
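
/* Each perturbation contributes the simultaneous-perturbation estimate
 *   g_j(theta) ~= ( f(theta + c_k * Delta) - f(theta - c_k * Delta) )
 *                 / ( 2 * c_k * Delta_j ),
 * so one gradient estimate costs only two cost-function evaluations,
 * independent of the number of parameters. The combination of the scaled
 * Delta and the final division by scales[j]^2 makes the resulting update
 * behave like a step taken in the scaled parameter space, expressed in the
 * original parameters.
 */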

void
SPSAOptimizer::GuessParameters(
  SizeValueType numberOfGradientEstimates,
  double initialStepSize)
{
  // Set the step-delay A to 10% of the maximum number of iterations.
  this->SetA(static_cast< double >( this->GetMaximumNumberOfIterations() ) / 10.0);

  if ( !m_CostFunction )
    {
    itkExceptionMacro(<< "No objective function defined! ");
    }

  // The number of parameters equals the dimension of the search space.
  const unsigned int spaceDimension =
    m_CostFunction->GetNumberOfParameters();

  // Check whether the initial position has the correct number of parameters.
  const ParametersType & initialPosition = this->GetInitialPosition();
  if ( spaceDimension != initialPosition.GetSize() )
    {
    itkExceptionMacro(<< "Number of parameters not correct!");
    }

  // Estimate the average absolute gradient at the initial position.
  DerivativeType averageAbsoluteGradient(spaceDimension);
  averageAbsoluteGradient.Fill(0.0);
  m_CurrentIteration = 0;
  for ( SizeValueType n = 1; n <= numberOfGradientEstimates; ++n )
    {
    this->ComputeGradient(initialPosition, m_Gradient);
    for ( unsigned int j = 0; j < spaceDimension; j++ )
      {
      averageAbsoluteGradient[j] += vcl_fabs(m_Gradient[j]);
      }
    } // end for ++n
  averageAbsoluteGradient /= static_cast< double >( numberOfGradientEstimates );

  // Set Sa such that the first step is approximately initialStepSize for the
  // parameter with the largest average absolute gradient.
  this->SetSa( initialStepSize * vcl_pow(m_A + 1.0, m_Alpha)
               / averageAbsoluteGradient.max_value() );
} //end GuessParameters
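
/* Rationale: the first gain is a_0 = Sa / (A + 1)^Alpha, so choosing
 *   Sa = initialStepSize * (A + 1)^Alpha / max_j( average |g_j| )
 * makes the largest component of the very first update approximately equal
 * to initialStepSize.
 */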

const std::string
SPSAOptimizer::GetStopConditionDescription() const
{
  std::ostringstream reason;

  reason << this->GetNameOfClass() << ": ";
  switch ( m_StopCondition )
    {
    case Unknown:
      reason << "Unknown stop condition";
      break;
    case MaximumNumberOfIterations:
      reason << "Maximum number of iterations exceeded. Number of iterations is "
             << m_MaximumNumberOfIterations;
      break;
    case BelowTolerance:
      reason << "Below tolerance. " << "Tolerance is " << m_Tolerance;
      break;
    case MetricError:
      reason << "Metric error";
      break;
    default:
      reason << " No reason given for termination ";
      break;
    }
  return reason.str();
}
} // end namespace itk

#endif // end #ifndef __itkSPSAOptimizer_cxx
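
/* A minimal usage sketch. This is illustrative only: "MyCostFunction" stands
 * for any itk::SingleValuedCostFunction subclass, and the numeric settings
 * are placeholders rather than recommended values.
 *
 *   #include "itkSPSAOptimizer.h"
 *
 *   itk::SPSAOptimizer::Pointer optimizer = itk::SPSAOptimizer::New();
 *   MyCostFunction::Pointer costFunction = MyCostFunction::New();
 *
 *   optimizer->SetCostFunction(costFunction);
 *   optimizer->SetInitialPosition(initialParameters);
 *   optimizer->SetMaximumNumberOfIterations(200);
 *   optimizer->SetNumberOfPerturbations(1);
 *
 *   // Optionally estimate Sa from a few gradient probes so that the first
 *   // step is roughly 0.1 (see GuessParameters above).
 *   optimizer->GuessParameters(10, 0.1);
 *
 *   optimizer->StartOptimization();
 *   itk::SPSAOptimizer::ParametersType result = optimizer->GetCurrentPosition();
 */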
