go home Home | Main Page | Modules | Namespace List | Class Hierarchy | Alphabetical List | Data Structures | File List | Namespace Members | Data Fields | Globals | Related Pages
itk::StochasticGradientDescentOptimizer Class Reference

#include <itkStochasticGradientDescentOptimizer.h>

Detailed Description

Implement a gradient descent optimizer.

StochasticGradientDescentOptimizer implements a simple gradient descent optimizer. At each iteration the current position is updated according to

\[
         p_{n+1} = p_n
                 + \mbox{learningRate}
\, \frac{\partial f(p_n) }{\partial p_n}
  \]

The learning rate is a fixed scalar defined via SetLearningRate(). The optimizer steps through a user defined number of iterations; no convergence checking is done.

Additionally, the user can scale each component of the $\partial f / \partial p$ by setting a scaling vector using the method SetScale().

The difference between this class and itk::GradientDescentOptimizer is that this class is based on the ScaledSingleValuedNonLinearOptimizer.

See also
ScaledSingleValuedNonLinearOptimizer

Definition at line 53 of file itkStochasticGradientDescentOptimizer.h.

Inheritance diagram for itk::StochasticGradientDescentOptimizer:

Data Structures

struct  MultiThreaderParameterType
 

Public Types

using ConstPointer = SmartPointer<const Self>
 
using Pointer = SmartPointer<Self>
 
using ScaledCostFunctionPointer
 
using ScaledCostFunctionType
 
using ScalesType
 
using Self = StochasticGradientDescentOptimizer
 
enum  StopConditionType {
  MaximumNumberOfIterations , MetricError , MinimumStepSize , InvalidDiagonalMatrix ,
  GradientMagnitudeTolerance , LineSearchError
}
 
using Superclass = ScaledSingleValuedNonLinearOptimizer
 
- Public Types inherited from itk::ScaledSingleValuedNonLinearOptimizer
using ConstPointer = SmartPointer<const Self>
 
using Pointer = SmartPointer<Self>
 
using ScaledCostFunctionPointer = ScaledCostFunctionType::Pointer
 
using ScaledCostFunctionType = ScaledSingleValuedCostFunction
 
using ScalesType = NonLinearOptimizer::ScalesType
 
using Self = ScaledSingleValuedNonLinearOptimizer
 
using Superclass = SingleValuedNonLinearOptimizer
 

Public Member Functions

virtual void AdvanceOneStep ()
 
virtual const char * GetClassName () const
 
virtual unsigned int GetCurrentInnerIteration () const
 
virtual unsigned int GetCurrentIteration () const
 
virtual const DerivativeType & GetGradient ()
 
virtual unsigned int GetLBFGSMemory () const
 
virtual const double & GetLearningRate ()
 
virtual const unsigned long & GetNumberOfInnerIterations ()
 
virtual const unsigned long & GetNumberOfIterations ()
 
virtual const DerivativeType & GetPreviousGradient ()
 
virtual const ParametersType & GetPreviousPosition ()
 
virtual const DerivativeType & GetSearchDir ()
 
virtual const StopConditionType & GetStopCondition ()
 
virtual const double & GetValue ()
 
 ITK_DISALLOW_COPY_AND_MOVE (StochasticGradientDescentOptimizer)
 
virtual void MetricErrorResponse (ExceptionObject &err)
 
virtual void ResumeOptimization ()
 
virtual void SetLearningRate (double _arg)
 
virtual void SetNumberOfIterations (unsigned long _arg)
 
void SetNumberOfWorkUnits (ThreadIdType numberOfThreads)
 
virtual void SetPreviousGradient (DerivativeType _arg)
 
virtual void SetPreviousPosition (ParametersType _arg)
 
virtual void SetUseEigen (bool _arg)
 
virtual void SetUseMultiThread (bool _arg)
 
void StartOptimization () override
 
virtual void StopOptimization ()
 
- Public Member Functions inherited from itk::ScaledSingleValuedNonLinearOptimizer
const ParametersType & GetCurrentPosition () const override
 
virtual bool GetMaximize () const
 
virtual const ScaledCostFunctionType & GetScaledCostFunction ()
 
virtual const ParametersType & GetScaledCurrentPosition ()
 
bool GetUseScales () const
 
virtual void InitializeScales ()
 
 ITK_DISALLOW_COPY_AND_MOVE (ScaledSingleValuedNonLinearOptimizer)
 
virtual void MaximizeOff ()
 
virtual void MaximizeOn ()
 
void SetCostFunction (CostFunctionType *costFunction) override
 
virtual void SetMaximize (bool _arg)
 
virtual void SetUseScales (bool arg)
 

Static Public Member Functions

static Pointer New ()
 
- Static Public Member Functions inherited from itk::ScaledSingleValuedNonLinearOptimizer
static Pointer New ()
 

Protected Types

using ThreadInfoType = MultiThreaderBase::WorkUnitInfo
 

Protected Member Functions

void PrintSelf (std::ostream &os, Indent indent) const override
 
 StochasticGradientDescentOptimizer ()
 
 ~StochasticGradientDescentOptimizer () override=default
 
- Protected Member Functions inherited from itk::ScaledSingleValuedNonLinearOptimizer
virtual void GetScaledDerivative (const ParametersType &parameters, DerivativeType &derivative) const
 
virtual MeasureType GetScaledValue (const ParametersType &parameters) const
 
virtual void GetScaledValueAndDerivative (const ParametersType &parameters, MeasureType &value, DerivativeType &derivative) const
 
void PrintSelf (std::ostream &os, Indent indent) const override
 
 ScaledSingleValuedNonLinearOptimizer ()
 
void SetCurrentPosition (const ParametersType &param) override
 
virtual void SetScaledCurrentPosition (const ParametersType &parameters)
 
 ~ScaledSingleValuedNonLinearOptimizer () override=default
 

Protected Attributes

unsigned long m_CurrentInnerIteration {}
 
unsigned long m_CurrentIteration { 0 }
 
DerivativeType m_Gradient {}
 
unsigned long m_LBFGSMemory { 0 }
 
double m_LearningRate { 1.0 }
 
ParametersType m_MeanSearchDir {}
 
unsigned long m_NumberOfInnerIterations {}
 
unsigned long m_NumberOfIterations { 100 }
 
DerivativeType m_PrePreviousGradient {}
 
ParametersType m_PrePreviousSearchDir {}
 
DerivativeType m_PreviousGradient {}
 
ParametersType m_PreviousPosition {}
 
ParametersType m_PreviousSearchDir {}
 
ParametersType m_SearchDir {}
 
bool m_Stop { false }
 
StopConditionType m_StopCondition { MaximumNumberOfIterations }
 
MultiThreaderBase::Pointer m_Threader { MultiThreaderBase::New() }
 
double m_Value { 0.0 }
 
- Protected Attributes inherited from itk::ScaledSingleValuedNonLinearOptimizer
ScaledCostFunctionPointer m_ScaledCostFunction {}
 
ParametersType m_ScaledCurrentPosition {}
 

Private Member Functions

void ThreadedAdvanceOneStep (ThreadIdType threadId, ParametersType &newPosition)
 

Static Private Member Functions

static ITK_THREAD_RETURN_FUNCTION_CALL_CONVENTION AdvanceOneStepThreaderCallback (void *arg)
 

Private Attributes

bool m_UseEigen { false }
 
bool m_UseMultiThread { false }
 

Member Typedef Documentation

◆ ConstPointer

◆ Pointer

◆ ScaledCostFunctionPointer

◆ ScaledCostFunctionType

◆ ScalesType

◆ Self

◆ Superclass

◆ ThreadInfoType

using itk::StochasticGradientDescentOptimizer::ThreadInfoType = MultiThreaderBase::WorkUnitInfo
protected

Typedef for multi-threading.

Definition at line 179 of file itkStochasticGradientDescentOptimizer.h.

Member Enumeration Documentation

◆ StopConditionType

Codes of stopping conditions. The MinimumStepSize stop condition never occurs in this class, but may be implemented in inheriting classes.

Enumerator
MaximumNumberOfIterations 
MetricError 
MinimumStepSize 
InvalidDiagonalMatrix 
GradientMagnitudeTolerance 
LineSearchError 

Definition at line 82 of file itkStochasticGradientDescentOptimizer.h.

Constructor & Destructor Documentation

◆ StochasticGradientDescentOptimizer()

itk::StochasticGradientDescentOptimizer::StochasticGradientDescentOptimizer ( )
protected

◆ ~StochasticGradientDescentOptimizer()

itk::StochasticGradientDescentOptimizer::~StochasticGradientDescentOptimizer ( )
overrideprotecteddefault

Member Function Documentation

◆ AdvanceOneStep()

virtual void itk::StochasticGradientDescentOptimizer::AdvanceOneStep ( )
virtual

Advance one step following the gradient direction.

Reimplemented in elastix::AdaptiveStochasticLBFGS< TElastix >, and itk::StandardStochasticGradientOptimizer.

◆ AdvanceOneStepThreaderCallback()

static ITK_THREAD_RETURN_FUNCTION_CALL_CONVENTION itk::StochasticGradientDescentOptimizer::AdvanceOneStepThreaderCallback ( void * arg)
staticprivate

The callback function.

◆ GetClassName()

virtual const char * itk::StochasticGradientDescentOptimizer::GetClassName ( ) const
virtual

◆ GetCurrentInnerIteration()

virtual unsigned int itk::StochasticGradientDescentOptimizer::GetCurrentInnerIteration ( ) const
virtual

Get the current inner iteration number.

◆ GetCurrentIteration()

virtual unsigned int itk::StochasticGradientDescentOptimizer::GetCurrentIteration ( ) const
virtual

Get the current iteration number.

◆ GetGradient()

virtual const DerivativeType & itk::StochasticGradientDescentOptimizer::GetGradient ( )
virtual

Get current gradient.

◆ GetLBFGSMemory()

virtual unsigned int itk::StochasticGradientDescentOptimizer::GetLBFGSMemory ( ) const
virtual

Get the inner LBFGSMemory.

◆ GetLearningRate()

virtual const double & itk::StochasticGradientDescentOptimizer::GetLearningRate ( )
virtual

Get the learning rate.

◆ GetNumberOfInnerIterations()

virtual const unsigned long & itk::StochasticGradientDescentOptimizer::GetNumberOfInnerIterations ( )
virtual

Get the number of inner loop iterations.

◆ GetNumberOfIterations()

virtual const unsigned long & itk::StochasticGradientDescentOptimizer::GetNumberOfIterations ( )
virtual

Get the number of iterations.

◆ GetPreviousGradient()

virtual const DerivativeType & itk::StochasticGradientDescentOptimizer::GetPreviousGradient ( )
virtual

Get the Previous gradient.

◆ GetPreviousPosition()

virtual const ParametersType & itk::StochasticGradientDescentOptimizer::GetPreviousPosition ( )
virtual

Get the Previous Position.

◆ GetSearchDir()

virtual const DerivativeType & itk::StochasticGradientDescentOptimizer::GetSearchDir ( )
virtual

Get current search direction.

◆ GetStopCondition()

virtual const StopConditionType & itk::StochasticGradientDescentOptimizer::GetStopCondition ( )
virtual

Get Stop condition.

◆ GetValue()

virtual const double & itk::StochasticGradientDescentOptimizer::GetValue ( )
virtual

Get the current value.

◆ ITK_DISALLOW_COPY_AND_MOVE()

itk::StochasticGradientDescentOptimizer::ITK_DISALLOW_COPY_AND_MOVE ( StochasticGradientDescentOptimizer )

◆ MetricErrorResponse()

virtual void itk::StochasticGradientDescentOptimizer::MetricErrorResponse ( ExceptionObject & err)
virtual

Stop optimization and pass on the exception.

◆ New()

static Pointer itk::StochasticGradientDescentOptimizer::New ( )
static

Method for creation through the object factory.

◆ PrintSelf()

void itk::StochasticGradientDescentOptimizer::PrintSelf ( std::ostream & os,
Indent indent ) const
overrideprotected

◆ ResumeOptimization()

virtual void itk::StochasticGradientDescentOptimizer::ResumeOptimization ( )
virtual

Resume previously stopped optimization with current parameters

See also
StopOptimization.

Reimplemented in elastix::AdaptiveStochasticLBFGS< TElastix >.

◆ SetLearningRate()

virtual void itk::StochasticGradientDescentOptimizer::SetLearningRate ( double _arg)
virtual

Set the learning rate.

◆ SetNumberOfIterations()

virtual void itk::StochasticGradientDescentOptimizer::SetNumberOfIterations ( unsigned long _arg)
virtual

Set the number of iterations.

◆ SetNumberOfWorkUnits()

void itk::StochasticGradientDescentOptimizer::SetNumberOfWorkUnits ( ThreadIdType numberOfThreads)
inline

Set the number of threads.

Definition at line 164 of file itkStochasticGradientDescentOptimizer.h.

◆ SetPreviousGradient()

virtual void itk::StochasticGradientDescentOptimizer::SetPreviousGradient ( DerivativeType _arg)
virtual

Set the Previous gradient.

◆ SetPreviousPosition()

virtual void itk::StochasticGradientDescentOptimizer::SetPreviousPosition ( ParametersType _arg)
virtual

Set the Previous Position.

◆ SetUseEigen()

virtual void itk::StochasticGradientDescentOptimizer::SetUseEigen ( bool _arg)
virtual

◆ SetUseMultiThread()

virtual void itk::StochasticGradientDescentOptimizer::SetUseMultiThread ( bool _arg)
virtual

◆ StartOptimization()

void itk::StochasticGradientDescentOptimizer::StartOptimization ( )
override

Start optimization.

◆ StopOptimization()

virtual void itk::StochasticGradientDescentOptimizer::StopOptimization ( )
virtual

Stop optimization.

See also
ResumeOptimization

Reimplemented in elastix::AdaptiveStochasticLBFGS< TElastix >.

◆ ThreadedAdvanceOneStep()

void itk::StochasticGradientDescentOptimizer::ThreadedAdvanceOneStep ( ThreadIdType threadId,
ParametersType & newPosition )
inlineprivate

The threaded implementation of AdvanceOneStep().

Field Documentation

◆ m_CurrentInnerIteration

unsigned long itk::StochasticGradientDescentOptimizer::m_CurrentInnerIteration {}
protected

Definition at line 199 of file itkStochasticGradientDescentOptimizer.h.

◆ m_CurrentIteration

unsigned long itk::StochasticGradientDescentOptimizer::m_CurrentIteration { 0 }
protected

Definition at line 198 of file itkStochasticGradientDescentOptimizer.h.

◆ m_Gradient

DerivativeType itk::StochasticGradientDescentOptimizer::m_Gradient {}
protected

Definition at line 183 of file itkStochasticGradientDescentOptimizer.h.

◆ m_LBFGSMemory

unsigned long itk::StochasticGradientDescentOptimizer::m_LBFGSMemory { 0 }
protected

Definition at line 200 of file itkStochasticGradientDescentOptimizer.h.

◆ m_LearningRate

double itk::StochasticGradientDescentOptimizer::m_LearningRate { 1.0 }
protected

Definition at line 188 of file itkStochasticGradientDescentOptimizer.h.

◆ m_MeanSearchDir

ParametersType itk::StochasticGradientDescentOptimizer::m_MeanSearchDir {}
protected

Definition at line 187 of file itkStochasticGradientDescentOptimizer.h.

◆ m_NumberOfInnerIterations

unsigned long itk::StochasticGradientDescentOptimizer::m_NumberOfInnerIterations {}
protected

Definition at line 197 of file itkStochasticGradientDescentOptimizer.h.

◆ m_NumberOfIterations

unsigned long itk::StochasticGradientDescentOptimizer::m_NumberOfIterations { 100 }
protected

Definition at line 196 of file itkStochasticGradientDescentOptimizer.h.

◆ m_PrePreviousGradient

DerivativeType itk::StochasticGradientDescentOptimizer::m_PrePreviousGradient {}
protected

Definition at line 191 of file itkStochasticGradientDescentOptimizer.h.

◆ m_PrePreviousSearchDir

ParametersType itk::StochasticGradientDescentOptimizer::m_PrePreviousSearchDir {}
protected

Definition at line 186 of file itkStochasticGradientDescentOptimizer.h.

◆ m_PreviousGradient

DerivativeType itk::StochasticGradientDescentOptimizer::m_PreviousGradient {}
protected

Definition at line 190 of file itkStochasticGradientDescentOptimizer.h.

◆ m_PreviousPosition

ParametersType itk::StochasticGradientDescentOptimizer::m_PreviousPosition {}
protected

Definition at line 192 of file itkStochasticGradientDescentOptimizer.h.

◆ m_PreviousSearchDir

ParametersType itk::StochasticGradientDescentOptimizer::m_PreviousSearchDir {}
protected

Definition at line 185 of file itkStochasticGradientDescentOptimizer.h.

◆ m_SearchDir

ParametersType itk::StochasticGradientDescentOptimizer::m_SearchDir {}
protected

Definition at line 184 of file itkStochasticGradientDescentOptimizer.h.

◆ m_Stop

bool itk::StochasticGradientDescentOptimizer::m_Stop { false }
protected

Definition at line 195 of file itkStochasticGradientDescentOptimizer.h.

◆ m_StopCondition

StopConditionType itk::StochasticGradientDescentOptimizer::m_StopCondition { MaximumNumberOfIterations }
protected

Definition at line 189 of file itkStochasticGradientDescentOptimizer.h.

◆ m_Threader

MultiThreaderBase::Pointer itk::StochasticGradientDescentOptimizer::m_Threader { MultiThreaderBase::New() }
protected

Definition at line 193 of file itkStochasticGradientDescentOptimizer.h.

◆ m_UseEigen

bool itk::StochasticGradientDescentOptimizer::m_UseEigen { false }
private

Definition at line 211 of file itkStochasticGradientDescentOptimizer.h.

◆ m_UseMultiThread

bool itk::StochasticGradientDescentOptimizer::m_UseMultiThread { false }
private

Definition at line 204 of file itkStochasticGradientDescentOptimizer.h.

◆ m_Value

double itk::StochasticGradientDescentOptimizer::m_Value { 0.0 }
protected

Definition at line 182 of file itkStochasticGradientDescentOptimizer.h.



Generated on 2024-07-17 for elastix by doxygen 1.11.0 (9b424b03c9833626cd435af22a444888fbbb192d) elastix logo