go home Home | Main Page | Modules | Namespace List | Class Hierarchy | Alphabetical List | Data Structures | File List | Namespace Members | Data Fields | Globals | Related Pages
itk::GenericConjugateGradientOptimizer Class Reference

#include <itkGenericConjugateGradientOptimizer.h>

Detailed Description

A set of conjugate gradient algorithms.

The steplength is determined at each iteration by means of a line search routine. The itk::MoreThuenteLineSearchOptimizer works well.

Definition at line 40 of file itkGenericConjugateGradientOptimizer.h.

Inheritance diagram for itk::GenericConjugateGradientOptimizer:

Public Types

using BetaDefinitionMapType = std::map<BetaDefinitionType, ComputeBetaFunctionType>
 
using BetaDefinitionType = std::string
 
using ComputeBetaFunctionType
 
using ConstPointer = SmartPointer<const Self>
 
using LineSearchOptimizerPointer = LineSearchOptimizerType::Pointer
 
using LineSearchOptimizerType = LineSearchOptimizer
 
using Pointer = SmartPointer<Self>
 
using ScaledCostFunctionType
 
using ScalesType
 
using Self = GenericConjugateGradientOptimizer
 
enum class  StopConditionType : unsigned int {
  MetricError , LineSearchError , MaximumNumberOfIterations , GradientMagnitudeTolerance ,
  ValueTolerance , InfiniteBeta , Unknown
}
 
using Superclass = ScaledSingleValuedNonLinearOptimizer
 
- Public Types inherited from itk::ScaledSingleValuedNonLinearOptimizer
using ConstPointer = SmartPointer<const Self>
 
using Pointer = SmartPointer<Self>
 
using ScaledCostFunctionPointer = ScaledCostFunctionType::Pointer
 
using ScaledCostFunctionType = ScaledSingleValuedCostFunction
 
using ScalesType = NonLinearOptimizer::ScalesType
 
using Self = ScaledSingleValuedNonLinearOptimizer
 
using Superclass = SingleValuedNonLinearOptimizer
 

Public Member Functions

virtual const BetaDefinitionType & GetBetaDefinition ()
 
virtual const char * GetClassName () const
 
virtual const DerivativeType & GetCurrentGradient ()
 
virtual unsigned long GetCurrentIteration () const
 
virtual double GetCurrentStepLength () const
 
virtual MeasureType GetCurrentValue () const
 
virtual double GetGradientMagnitudeTolerance () const
 
virtual bool GetInLineSearch () const
 
virtual unsigned long GetMaximumNumberOfIterations () const
 
virtual unsigned long GetMaxNrOfItWithoutImprovement () const
 
virtual const StopConditionType & GetStopCondition ()
 
virtual double GetValueTolerance () const
 
 ITK_DISALLOW_COPY_AND_MOVE (GenericConjugateGradientOptimizer)
 
 itkGetModifiableObjectMacro (LineSearchOptimizer, LineSearchOptimizerType)
 
virtual void ResumeOptimization ()
 
void SetBetaDefinition (const BetaDefinitionType &arg)
 
virtual void SetGradientMagnitudeTolerance (double _arg)
 
virtual void SetLineSearchOptimizer (LineSearchOptimizerType *_arg)
 
virtual void SetMaximumNumberOfIterations (unsigned long _arg)
 
virtual void SetMaxNrOfItWithoutImprovement (unsigned long arg)
 
virtual void SetValueTolerance (double _arg)
 
void StartOptimization () override
 
virtual void StopOptimization ()
 
- Public Member Functions inherited from itk::ScaledSingleValuedNonLinearOptimizer
const ParametersType & GetCurrentPosition () const override
 
virtual bool GetMaximize () const
 
virtual const ScaledCostFunctionType & GetScaledCostFunction ()
 
virtual const ParametersType & GetScaledCurrentPosition ()
 
bool GetUseScales () const
 
virtual void InitializeScales ()
 
 ITK_DISALLOW_COPY_AND_MOVE (ScaledSingleValuedNonLinearOptimizer)
 
virtual void MaximizeOff ()
 
virtual void MaximizeOn ()
 
void SetCostFunction (CostFunctionType *costFunction) override
 
virtual void SetMaximize (bool _arg)
 
virtual void SetUseScales (bool arg)
 

Static Public Member Functions

static Pointer New ()
 
- Static Public Member Functions inherited from itk::ScaledSingleValuedNonLinearOptimizer
static Pointer New ()
 

Protected Member Functions

void AddBetaDefinition (const BetaDefinitionType &name, ComputeBetaFunctionType function)
 
virtual double ComputeBeta (const DerivativeType &previousGradient, const DerivativeType &gradient, const ParametersType &previousSearchDir)
 
double ComputeBetaDY (const DerivativeType &previousGradient, const DerivativeType &gradient, const ParametersType &previousSearchDir)
 
double ComputeBetaDYHS (const DerivativeType &previousGradient, const DerivativeType &gradient, const ParametersType &previousSearchDir)
 
double ComputeBetaFR (const DerivativeType &previousGradient, const DerivativeType &gradient, const ParametersType &previousSearchDir)
 
double ComputeBetaHS (const DerivativeType &previousGradient, const DerivativeType &gradient, const ParametersType &previousSearchDir)
 
double ComputeBetaPR (const DerivativeType &previousGradient, const DerivativeType &gradient, const ParametersType &previousSearchDir)
 
double ComputeBetaSD (const DerivativeType &previousGradient, const DerivativeType &gradient, const ParametersType &previousSearchDir)
 
virtual void ComputeSearchDirection (const DerivativeType &previousGradient, const DerivativeType &gradient, ParametersType &searchDir)
 
 GenericConjugateGradientOptimizer ()
 
virtual void LineSearch (const ParametersType searchDir, double &step, ParametersType &x, MeasureType &f, DerivativeType &g)
 
void PrintSelf (std::ostream &os, Indent indent) const override
 
virtual void SetInLineSearch (bool _arg)
 
virtual bool TestConvergence (bool firstLineSearchDone)
 
 ~GenericConjugateGradientOptimizer () override=default
 
- Protected Member Functions inherited from itk::ScaledSingleValuedNonLinearOptimizer
virtual void GetScaledDerivative (const ParametersType &parameters, DerivativeType &derivative) const
 
virtual MeasureType GetScaledValue (const ParametersType &parameters) const
 
virtual void GetScaledValueAndDerivative (const ParametersType &parameters, MeasureType &value, DerivativeType &derivative) const
 
void PrintSelf (std::ostream &os, Indent indent) const override
 
 ScaledSingleValuedNonLinearOptimizer ()
 
void SetCurrentPosition (const ParametersType &param) override
 
virtual void SetScaledCurrentPosition (const ParametersType &parameters)
 
 ~ScaledSingleValuedNonLinearOptimizer () override=default
 

Protected Attributes

BetaDefinitionType m_BetaDefinition {}
 
BetaDefinitionMapType m_BetaDefinitionMap {}
 
DerivativeType m_CurrentGradient {}
 
unsigned long m_CurrentIteration { 0 }
 
double m_CurrentStepLength { 0.0 }
 
MeasureType m_CurrentValue { 0.0 }
 
bool m_InLineSearch { false }
 
bool m_PreviousGradientAndSearchDirValid { false }
 
bool m_Stop { false }
 
StopConditionType m_StopCondition { StopConditionType::Unknown }
 
bool m_UseDefaultMaxNrOfItWithoutImprovement { true }
 
- Protected Attributes inherited from itk::ScaledSingleValuedNonLinearOptimizer
ScaledCostFunctionPointer m_ScaledCostFunction {}
 
ParametersType m_ScaledCurrentPosition {}
 

Private Attributes

double m_GradientMagnitudeTolerance { 1e-5 }
 
LineSearchOptimizerPointer m_LineSearchOptimizer { nullptr }
 
unsigned long m_MaximumNumberOfIterations { 100 }
 
unsigned long m_MaxNrOfItWithoutImprovement { 10 }
 
double m_ValueTolerance { 1e-5 }
 

Member Typedef Documentation

◆ BetaDefinitionMapType

◆ BetaDefinitionType

◆ ComputeBetaFunctionType

Initial value:
double (Self::*)(const DerivativeType &,
const DerivativeType &,
const ParametersType &)

Typedef for a function that computes $\beta$, given the previousGradient, the current gradient, and the previous search direction

Definition at line 65 of file itkGenericConjugateGradientOptimizer.h.

◆ ConstPointer

◆ LineSearchOptimizerPointer

◆ LineSearchOptimizerType

◆ Pointer

◆ ScaledCostFunctionType

◆ ScalesType

◆ Self

◆ Superclass

Member Enumeration Documentation

◆ StopConditionType

Enumerator
MetricError 
LineSearchError 
MaximumNumberOfIterations 
GradientMagnitudeTolerance 
ValueTolerance 
InfiniteBeta 
Unknown 

Definition at line 71 of file itkGenericConjugateGradientOptimizer.h.

Constructor & Destructor Documentation

◆ GenericConjugateGradientOptimizer()

itk::GenericConjugateGradientOptimizer::GenericConjugateGradientOptimizer ( )
protected

◆ ~GenericConjugateGradientOptimizer()

itk::GenericConjugateGradientOptimizer::~GenericConjugateGradientOptimizer ( )
overrideprotecteddefault

Member Function Documentation

◆ AddBetaDefinition()

void itk::GenericConjugateGradientOptimizer::AddBetaDefinition ( const BetaDefinitionType & name,
ComputeBetaFunctionType function )
protected

Function to add a new beta definition. The first argument should be a name via which a user can select this $\beta$ definition. The second argument is a pointer to a method that computes $\beta$. Called in the constructor of this class, and possibly by subclasses.

◆ ComputeBeta()

virtual double itk::GenericConjugateGradientOptimizer::ComputeBeta ( const DerivativeType & previousGradient,
const DerivativeType & gradient,
const ParametersType & previousSearchDir )
protectedvirtual

Compute $\beta$ according to the user set $\beta$-definition

◆ ComputeBetaDY()

double itk::GenericConjugateGradientOptimizer::ComputeBetaDY ( const DerivativeType & previousGradient,
const DerivativeType & gradient,
const ParametersType & previousSearchDir )
protected

"DaiYuan"

◆ ComputeBetaDYHS()

double itk::GenericConjugateGradientOptimizer::ComputeBetaDYHS ( const DerivativeType & previousGradient,
const DerivativeType & gradient,
const ParametersType & previousSearchDir )
protected

"DaiYuanHestenesStiefel"

◆ ComputeBetaFR()

double itk::GenericConjugateGradientOptimizer::ComputeBetaFR ( const DerivativeType & previousGradient,
const DerivativeType & gradient,
const ParametersType & previousSearchDir )
protected

"FletcherReeves"

◆ ComputeBetaHS()

double itk::GenericConjugateGradientOptimizer::ComputeBetaHS ( const DerivativeType & previousGradient,
const DerivativeType & gradient,
const ParametersType & previousSearchDir )
protected

"HestenesStiefel"

◆ ComputeBetaPR()

double itk::GenericConjugateGradientOptimizer::ComputeBetaPR ( const DerivativeType & previousGradient,
const DerivativeType & gradient,
const ParametersType & previousSearchDir )
protected

"PolakRibiere"

◆ ComputeBetaSD()

double itk::GenericConjugateGradientOptimizer::ComputeBetaSD ( const DerivativeType & previousGradient,
const DerivativeType & gradient,
const ParametersType & previousSearchDir )
protected

Different definitions of $\beta$. "SteepestDescent": beta=0

◆ ComputeSearchDirection()

virtual void itk::GenericConjugateGradientOptimizer::ComputeSearchDirection ( const DerivativeType & previousGradient,
const DerivativeType & gradient,
ParametersType & searchDir )
protectedvirtual

Compute the search direction:

\[ d_{k} = - g_{k} + \beta_{k} d_{k-1} \]

In the first iteration the search direction is computed as:

\[ d_{0} = - g_{0} \]

On calling, searchDir should equal $d_{k-1}$. On return searchDir equals $d_{k}$.

◆ GetBetaDefinition()

virtual const BetaDefinitionType & itk::GenericConjugateGradientOptimizer::GetBetaDefinition ( )
virtual

◆ GetClassName()

virtual const char * itk::GenericConjugateGradientOptimizer::GetClassName ( ) const
virtual

◆ GetCurrentGradient()

virtual const DerivativeType & itk::GenericConjugateGradientOptimizer::GetCurrentGradient ( )
virtual

◆ GetCurrentIteration()

virtual unsigned long itk::GenericConjugateGradientOptimizer::GetCurrentIteration ( ) const
virtual

Get information about optimization process:

◆ GetCurrentStepLength()

virtual double itk::GenericConjugateGradientOptimizer::GetCurrentStepLength ( ) const
virtual

◆ GetCurrentValue()

virtual MeasureType itk::GenericConjugateGradientOptimizer::GetCurrentValue ( ) const
virtual

◆ GetGradientMagnitudeTolerance()

virtual double itk::GenericConjugateGradientOptimizer::GetGradientMagnitudeTolerance ( ) const
virtual

Setting: the minimum gradient magnitude. By default 1e-5.

The optimizer stops when: $ \|CurrentGradient\| <
  GradientMagnitudeTolerance * \max(1, \|CurrentPosition\| ) $

◆ GetInLineSearch()

virtual bool itk::GenericConjugateGradientOptimizer::GetInLineSearch ( ) const
virtual

◆ GetMaximumNumberOfIterations()

virtual unsigned long itk::GenericConjugateGradientOptimizer::GetMaximumNumberOfIterations ( ) const
virtual

Setting: the maximum number of iterations

◆ GetMaxNrOfItWithoutImprovement()

virtual unsigned long itk::GenericConjugateGradientOptimizer::GetMaxNrOfItWithoutImprovement ( ) const
virtual

◆ GetStopCondition()

virtual const StopConditionType & itk::GenericConjugateGradientOptimizer::GetStopCondition ( )
virtual

◆ GetValueTolerance()

virtual double itk::GenericConjugateGradientOptimizer::GetValueTolerance ( ) const
virtual

Setting: a stopping criterion, the value tolerance. By default 1e-5.

The optimizer stops when

\[ 2.0 * | f_k - f_{k-1} | \le
  ValueTolerance * ( |f_k| + |f_{k-1}| + 1e-20 ) \]

is satisfied MaxNrOfItWithoutImprovement times in a row.

◆ ITK_DISALLOW_COPY_AND_MOVE()

itk::GenericConjugateGradientOptimizer::ITK_DISALLOW_COPY_AND_MOVE ( GenericConjugateGradientOptimizer )

◆ itkGetModifiableObjectMacro()

itk::GenericConjugateGradientOptimizer::itkGetModifiableObjectMacro ( LineSearchOptimizer ,
LineSearchOptimizerType  )

◆ LineSearch()

virtual void itk::GenericConjugateGradientOptimizer::LineSearch ( const ParametersType searchDir,
double & step,
ParametersType & x,
MeasureType & f,
DerivativeType & g )
protectedvirtual

Perform a line search along the search direction. On calling, $x, f$, and $g$ should contain the current position, the cost function value at this position, and the derivative. On return the step, $x$ (new position), $f$ (value at $x$), and $g$ (derivative at $x$) are updated.

◆ New()

static Pointer itk::GenericConjugateGradientOptimizer::New ( )
static

◆ PrintSelf()

void itk::GenericConjugateGradientOptimizer::PrintSelf ( std::ostream & os,
Indent indent ) const
overrideprotected

◆ ResumeOptimization()

virtual void itk::GenericConjugateGradientOptimizer::ResumeOptimization ( )
virtual

◆ SetBetaDefinition()

void itk::GenericConjugateGradientOptimizer::SetBetaDefinition ( const BetaDefinitionType & arg)

Setting: the definition of $\beta$, by default "DaiYuanHestenesStiefel"

◆ SetGradientMagnitudeTolerance()

virtual void itk::GenericConjugateGradientOptimizer::SetGradientMagnitudeTolerance ( double _arg)
virtual

◆ SetInLineSearch()

virtual void itk::GenericConjugateGradientOptimizer::SetInLineSearch ( bool _arg)
protectedvirtual

◆ SetLineSearchOptimizer()

virtual void itk::GenericConjugateGradientOptimizer::SetLineSearchOptimizer ( LineSearchOptimizerType * _arg)
virtual

Setting: the line search optimizer

◆ SetMaximumNumberOfIterations()

virtual void itk::GenericConjugateGradientOptimizer::SetMaximumNumberOfIterations ( unsigned long _arg)
virtual

◆ SetMaxNrOfItWithoutImprovement()

virtual void itk::GenericConjugateGradientOptimizer::SetMaxNrOfItWithoutImprovement ( unsigned long arg)
virtual

Setting: the maximum number of iterations in a row that satisfy the value tolerance criterion. By default (if never set) equal to the number of parameters.

◆ SetValueTolerance()

virtual void itk::GenericConjugateGradientOptimizer::SetValueTolerance ( double _arg)
virtual

◆ StartOptimization()

void itk::GenericConjugateGradientOptimizer::StartOptimization ( )
override

◆ StopOptimization()

virtual void itk::GenericConjugateGradientOptimizer::StopOptimization ( )
virtual

◆ TestConvergence()

virtual bool itk::GenericConjugateGradientOptimizer::TestConvergence ( bool firstLineSearchDone)
protectedvirtual

Check if convergence has occurred. The firstLineSearchDone bool allows the implementation of TestConvergence to decide to skip a few convergence checks when no line search has been performed yet (so, before the actual optimization begins).

Reimplemented in elastix::ConjugateGradient< TElastix >.

Field Documentation

◆ m_BetaDefinition

BetaDefinitionType itk::GenericConjugateGradientOptimizer::m_BetaDefinition {}
protected

The name of the BetaDefinition

Definition at line 169 of file itkGenericConjugateGradientOptimizer.h.

◆ m_BetaDefinitionMap

BetaDefinitionMapType itk::GenericConjugateGradientOptimizer::m_BetaDefinitionMap {}
protected

A mapping that links the names of the BetaDefinitions to functions that compute $\beta$.

Definition at line 173 of file itkGenericConjugateGradientOptimizer.h.

◆ m_CurrentGradient

DerivativeType itk::GenericConjugateGradientOptimizer::m_CurrentGradient {}
protected

Definition at line 147 of file itkGenericConjugateGradientOptimizer.h.

◆ m_CurrentIteration

unsigned long itk::GenericConjugateGradientOptimizer::m_CurrentIteration { 0 }
protected

Definition at line 149 of file itkGenericConjugateGradientOptimizer.h.

◆ m_CurrentStepLength

double itk::GenericConjugateGradientOptimizer::m_CurrentStepLength { 0.0 }
protected

Definition at line 152 of file itkGenericConjugateGradientOptimizer.h.

◆ m_CurrentValue

MeasureType itk::GenericConjugateGradientOptimizer::m_CurrentValue { 0.0 }
protected

Definition at line 148 of file itkGenericConjugateGradientOptimizer.h.

◆ m_GradientMagnitudeTolerance

double itk::GenericConjugateGradientOptimizer::m_GradientMagnitudeTolerance { 1e-5 }
private

Definition at line 259 of file itkGenericConjugateGradientOptimizer.h.

◆ m_InLineSearch

bool itk::GenericConjugateGradientOptimizer::m_InLineSearch { false }
protected

Is true when the LineSearchOptimizer has been started.

Definition at line 159 of file itkGenericConjugateGradientOptimizer.h.

◆ m_LineSearchOptimizer

LineSearchOptimizerPointer itk::GenericConjugateGradientOptimizer::m_LineSearchOptimizer { nullptr }
private

Definition at line 262 of file itkGenericConjugateGradientOptimizer.h.

◆ m_MaximumNumberOfIterations

unsigned long itk::GenericConjugateGradientOptimizer::m_MaximumNumberOfIterations { 100 }
private

Definition at line 257 of file itkGenericConjugateGradientOptimizer.h.

◆ m_MaxNrOfItWithoutImprovement

unsigned long itk::GenericConjugateGradientOptimizer::m_MaxNrOfItWithoutImprovement { 10 }
private

Definition at line 260 of file itkGenericConjugateGradientOptimizer.h.

◆ m_PreviousGradientAndSearchDirValid

bool itk::GenericConjugateGradientOptimizer::m_PreviousGradientAndSearchDirValid { false }
protected

Flag that says if the previous gradient and search direction are known. Typically 'false' at the start of optimization, or when a stopped optimization is resumed (in the latter case the previous gradient and search direction may of course still be valid, but to be safe it is assumed that they are not).

Definition at line 166 of file itkGenericConjugateGradientOptimizer.h.

◆ m_Stop

bool itk::GenericConjugateGradientOptimizer::m_Stop { false }
protected

Definition at line 151 of file itkGenericConjugateGradientOptimizer.h.

◆ m_StopCondition

StopConditionType itk::GenericConjugateGradientOptimizer::m_StopCondition { StopConditionType::Unknown }
protected

Definition at line 150 of file itkGenericConjugateGradientOptimizer.h.

◆ m_UseDefaultMaxNrOfItWithoutImprovement

bool itk::GenericConjugateGradientOptimizer::m_UseDefaultMaxNrOfItWithoutImprovement { true }
protected

Flag that is true as long as the method SetMaxNrOfItWithoutImprovement is never called

Definition at line 156 of file itkGenericConjugateGradientOptimizer.h.

◆ m_ValueTolerance

double itk::GenericConjugateGradientOptimizer::m_ValueTolerance { 1e-5 }
private

Definition at line 258 of file itkGenericConjugateGradientOptimizer.h.



Generated on 2024-07-17 for elastix by doxygen 1.11.0 (9b424b03c9833626cd435af22a444888fbbb192d) elastix logo