#include <itkGradientDescentOptimizer2.h>
Implement a gradient descent optimizer.
GradientDescentOptimizer2 implements a simple gradient descent optimizer. At each iteration the current position is updated according to

p_{n+1} = p_n + learningRate * ( ∂f(p_n) / ∂p_n )

The learning rate is a fixed scalar defined via SetLearningRate(). The optimizer steps through a user-defined number of iterations; no convergence checking is done.
Additionally, the user can scale each component of ∂f/∂p by setting a scaling vector using the method SetScales().
This class differs from itk::GradientDescentOptimizer in that it is based on the ScaledSingleValuedNonLinearOptimizer.
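The following sketch is illustrative rather than taken from the elastix sources: the free function name and all numeric values are examples, and the cost function is assumed to be some user-provided itk::SingleValuedCostFunction subclass.

```cpp
// Illustrative sketch: the function name and all numeric values are examples.
#include "itkGradientDescentOptimizer2.h"
#include "itkSingleValuedCostFunction.h"

void
RunGradientDescent(itk::SingleValuedCostFunction * costFunction,
                   const itk::GradientDescentOptimizer2::ParametersType & initialPosition)
{
  auto optimizer = itk::GradientDescentOptimizer2::New();

  optimizer->SetCostFunction(costFunction);
  optimizer->SetInitialPosition(initialPosition);
  optimizer->SetLearningRate(0.01);       // fixed scalar step size
  optimizer->SetNumberOfIterations(200);  // runs exactly this many iterations

  // Optionally weight each component of the derivative via the scales vector.
  itk::GradientDescentOptimizer2::ScalesType scales(initialPosition.GetSize());
  scales.Fill(1.0);
  optimizer->SetScales(scales);
  optimizer->SetUseScales(true);

  optimizer->StartOptimization();
}
```

Since no convergence test is performed, the learning rate and the iteration count together determine exactly how long the optimization runs.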
Definition at line 54 of file itkGradientDescentOptimizer2.h.
Public Types | |
using | ConstPointer = SmartPointer<const Self> |
using | Pointer = SmartPointer<Self> |
using | ScaledCostFunctionPointer |
using | ScaledCostFunctionType |
using | ScalesType |
using | Self = GradientDescentOptimizer2 |
enum | StopConditionType { MaximumNumberOfIterations , MetricError , MinimumStepSize } |
using | Superclass = ScaledSingleValuedNonLinearOptimizer |
Public Types inherited from itk::ScaledSingleValuedNonLinearOptimizer | |
using | ConstPointer = SmartPointer<const Self> |
using | Pointer = SmartPointer<Self> |
using | ScaledCostFunctionPointer = ScaledCostFunctionType::Pointer |
using | ScaledCostFunctionType = ScaledSingleValuedCostFunction |
using | ScalesType = NonLinearOptimizer::ScalesType |
using | Self = ScaledSingleValuedNonLinearOptimizer |
using | Superclass = SingleValuedNonLinearOptimizer |
Public Member Functions | |
virtual void | AdvanceOneStep () |
virtual const char * | GetClassName () const |
virtual unsigned int | GetCurrentIteration () const |
virtual const DerivativeType & | GetGradient () |
virtual const double & | GetLearningRate () |
virtual const unsigned long & | GetNumberOfIterations () |
virtual const DerivativeType & | GetSearchDirection () |
virtual const StopConditionType & | GetStopCondition () |
virtual const double & | GetValue () |
ITK_DISALLOW_COPY_AND_MOVE (GradientDescentOptimizer2) | |
virtual void | MetricErrorResponse (ExceptionObject &err) |
virtual void | ResumeOptimization () |
virtual void | SetLearningRate (double _arg) |
virtual void | SetNumberOfIterations (unsigned long _arg) |
void | StartOptimization () override |
virtual void | StopOptimization () |
Public Member Functions inherited from itk::ScaledSingleValuedNonLinearOptimizer | |
const ParametersType & | GetCurrentPosition () const override |
virtual bool | GetMaximize () const |
virtual const ScaledCostFunctionType * | GetScaledCostFunction () |
virtual const ParametersType & | GetScaledCurrentPosition () |
bool | GetUseScales () const |
virtual void | InitializeScales () |
ITK_DISALLOW_COPY_AND_MOVE (ScaledSingleValuedNonLinearOptimizer) | |
virtual void | MaximizeOff () |
virtual void | MaximizeOn () |
void | SetCostFunction (CostFunctionType *costFunction) override |
virtual void | SetMaximize (bool _arg) |
virtual void | SetUseScales (bool arg) |
Static Public Member Functions | |
static Pointer | New () |
Static Public Member Functions inherited from itk::ScaledSingleValuedNonLinearOptimizer | |
static Pointer | New () |
Protected Member Functions | |
GradientDescentOptimizer2 () | |
void | PrintSelf (std::ostream &os, Indent indent) const override |
~GradientDescentOptimizer2 () override=default | |
Protected Member Functions inherited from itk::ScaledSingleValuedNonLinearOptimizer | |
virtual void | GetScaledDerivative (const ParametersType &parameters, DerivativeType &derivative) const |
virtual MeasureType | GetScaledValue (const ParametersType &parameters) const |
virtual void | GetScaledValueAndDerivative (const ParametersType &parameters, MeasureType &value, DerivativeType &derivative) const |
void | PrintSelf (std::ostream &os, Indent indent) const override |
ScaledSingleValuedNonLinearOptimizer () | |
void | SetCurrentPosition (const ParametersType &param) override |
virtual void | SetScaledCurrentPosition (const ParametersType &parameters) |
~ScaledSingleValuedNonLinearOptimizer () override=default | |
Protected Attributes | |
DerivativeType | m_Gradient {} |
DerivativeType | m_SearchDirection {} |
StopConditionType | m_StopCondition { MaximumNumberOfIterations } |
Protected Attributes inherited from itk::ScaledSingleValuedNonLinearOptimizer | |
ScaledCostFunctionPointer | m_ScaledCostFunction {} |
ParametersType | m_ScaledCurrentPosition {} |
Private Attributes | |
unsigned long | m_CurrentIteration { 0 } |
double | m_LearningRate { 1.0 } |
unsigned long | m_NumberOfIterations { 100 } |
bool | m_Stop { false } |
double | m_Value { 0.0 } |
using itk::GradientDescentOptimizer2::ConstPointer = SmartPointer<const Self>
Definition at line 63 of file itkGradientDescentOptimizer2.h.
using itk::GradientDescentOptimizer2::Pointer = SmartPointer<Self>
Definition at line 62 of file itkGradientDescentOptimizer2.h.
using itk::GradientDescentOptimizer2::ScaledCostFunctionPointer
Definition at line 87 of file itkScaledSingleValuedNonLinearOptimizer.h.
using itk::GradientDescentOptimizer2::ScaledCostFunctionType
Definition at line 86 of file itkScaledSingleValuedNonLinearOptimizer.h.
using itk::GradientDescentOptimizer2::ScalesType
Definition at line 85 of file itkScaledSingleValuedNonLinearOptimizer.h.
using itk::GradientDescentOptimizer2::Self = GradientDescentOptimizer2
Standard class typedefs.
Definition at line 60 of file itkGradientDescentOptimizer2.h.
using itk::GradientDescentOptimizer2::Superclass = ScaledSingleValuedNonLinearOptimizer
Definition at line 61 of file itkGradientDescentOptimizer2.h.
Codes of stopping conditions. The MinimumStepSize stop condition never occurs in this class, but may be used by inheriting classes.
Enumerators:
MaximumNumberOfIterations
MetricError
MinimumStepSize
Definition at line 83 of file itkGradientDescentOptimizer2.h.
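As a small, hypothetical continuation of the usage sketch above (the `optimizer` variable and the console output are illustrative; includes are omitted), the stop condition might be inspected after a run:

```cpp
// Continues the earlier sketch; 'optimizer' is an itk::GradientDescentOptimizer2::Pointer.
optimizer->StartOptimization();

if (optimizer->GetStopCondition() == itk::GradientDescentOptimizer2::MetricError)
{
  std::cerr << "Stopped by a metric error at iteration "
            << optimizer->GetCurrentIteration() << std::endl;
}
else // MaximumNumberOfIterations: the fixed iteration budget was used up
{
  std::cout << "Final value " << optimizer->GetValue() << " after "
            << optimizer->GetCurrentIteration() << " iterations" << std::endl;
}
```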
itk::GradientDescentOptimizer2::GradientDescentOptimizer2 ( ) [protected]
itk::GradientDescentOptimizer2::~GradientDescentOptimizer2 ( ) [override, protected, default]
virtual void itk::GradientDescentOptimizer2::AdvanceOneStep ( )
Advance one step following the gradient direction.
Reimplemented in elastix::AdaGrad< TElastix >, elastix::PreconditionedStochasticGradientDescent< TElastix >, and itk::StandardGradientDescentOptimizer.
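A hypothetical subclass (the class name is illustrative and not part of elastix or ITK) could override this method to adjust the step before delegating the actual update to the base class, for example:

```cpp
// Hypothetical subclass, shown only to illustrate overriding AdvanceOneStep().
#include "itkGradientDescentOptimizer2.h"

class DecayingStepOptimizer : public itk::GradientDescentOptimizer2
{
public:
  ITK_DISALLOW_COPY_AND_MOVE(DecayingStepOptimizer);

  using Self = DecayingStepOptimizer;
  using Superclass = itk::GradientDescentOptimizer2;
  using Pointer = itk::SmartPointer<Self>;
  using ConstPointer = itk::SmartPointer<const Self>;

  itkNewMacro(Self);

  void
  AdvanceOneStep() override
  {
    // Example policy: let the learning rate decay with the iteration number,
    // then let the base class take the actual gradient step.
    this->SetLearningRate(1.0 / (1.0 + this->GetCurrentIteration()));
    this->Superclass::AdvanceOneStep();
  }

protected:
  DecayingStepOptimizer() = default;
  ~DecayingStepOptimizer() override = default;
};
```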
virtual const char * itk::GradientDescentOptimizer2::GetClassName ( ) const
Run-time type information (and related methods).
Reimplemented from itk::ScaledSingleValuedNonLinearOptimizer.
Reimplemented in elastix::AdaGrad< TElastix >, elastix::AdaptiveStochasticGradientDescent< TElastix >, elastix::PreconditionedStochasticGradientDescent< TElastix >, elastix::StandardGradientDescent< TElastix >, itk::AdaptiveStepsizeOptimizer, itk::AdaptiveStochasticGradientDescentOptimizer, itk::PreconditionedASGDOptimizer, and itk::StandardGradientDescentOptimizer.
virtual unsigned int itk::GradientDescentOptimizer2::GetCurrentIteration ( ) const
Get the current iteration number.
virtual const DerivativeType & itk::GradientDescentOptimizer2::GetGradient ( )
Get current gradient.
virtual const double & itk::GradientDescentOptimizer2::GetLearningRate ( )
Get the learning rate.
virtual const unsigned long & itk::GradientDescentOptimizer2::GetNumberOfIterations ( )
Get the number of iterations.
virtual const DerivativeType & itk::GradientDescentOptimizer2::GetSearchDirection ( )
Get the current search direction.
virtual const StopConditionType & itk::GradientDescentOptimizer2::GetStopCondition ( )
Get Stop condition.
virtual const double & itk::GradientDescentOptimizer2::GetValue ( )
Get the current value.
itk::GradientDescentOptimizer2::ITK_DISALLOW_COPY_AND_MOVE ( GradientDescentOptimizer2 )
virtual void itk::GradientDescentOptimizer2::MetricErrorResponse ( ExceptionObject & err )
Stop optimization and pass on exception.
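For illustration, a hypothetical user-defined subclass (class name is illustrative; its declaration and includes are omitted) could override this hook to log the failure before falling back to the default behaviour:

```cpp
// Hypothetical override in a user-defined subclass: log the failure, then fall
// back to the default behaviour, which stops the optimization and passes the
// exception on.
void
RobustGradientDescentOptimizer::MetricErrorResponse(itk::ExceptionObject & err)
{
  std::cerr << "Metric evaluation failed at iteration " << this->GetCurrentIteration()
            << ": " << err.GetDescription() << '\n';
  itk::GradientDescentOptimizer2::MetricErrorResponse(err); // default: stop and rethrow
}
```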
static Pointer itk::GradientDescentOptimizer2::New ( )
Method for creation through the object factory.
void itk::GradientDescentOptimizer2::PrintSelf ( std::ostream & os, Indent indent ) const [override, protected]
virtual void itk::GradientDescentOptimizer2::ResumeOptimization ( )
Resume previously stopped optimization with current parameters.
Reimplemented in elastix::AdaGrad< TElastix >, elastix::AdaptiveStochasticGradientDescent< TElastix >, and elastix::PreconditionedStochasticGradientDescent< TElastix >.
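As a hypothetical two-stage sketch (the values are examples, and it assumes the iteration counter is not reset by ResumeOptimization(), which is not stated in this documentation):

```cpp
// Illustrative two-stage run (values are examples). ResumeOptimization() keeps the
// current parameters; the iteration budget is raised here because the iteration
// counter is assumed not to be reset when resuming.
optimizer->SetNumberOfIterations(100);
optimizer->SetLearningRate(0.1);
optimizer->StartOptimization();      // coarse stage: iterations 0..99

optimizer->SetNumberOfIterations(200);
optimizer->SetLearningRate(0.01);
optimizer->ResumeOptimization();     // fine stage: continues from the reached position
```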
virtual void itk::GradientDescentOptimizer2::SetLearningRate ( double _arg )
Set the learning rate.
virtual void itk::GradientDescentOptimizer2::SetNumberOfIterations ( unsigned long _arg )
Set the number of iterations.
void itk::GradientDescentOptimizer2::StartOptimization ( ) [override]
Start optimization.
virtual void itk::GradientDescentOptimizer2::StopOptimization ( )
Stop optimization.
unsigned long itk::GradientDescentOptimizer2::m_CurrentIteration { 0 } [private]
Definition at line 155 of file itkGradientDescentOptimizer2.h.
DerivativeType itk::GradientDescentOptimizer2::m_Gradient {} [protected]
Definition at line 146 of file itkGradientDescentOptimizer2.h.
double itk::GradientDescentOptimizer2::m_LearningRate { 1.0 } [private]
Definition at line 152 of file itkGradientDescentOptimizer2.h.
unsigned long itk::GradientDescentOptimizer2::m_NumberOfIterations { 100 } [private]
Definition at line 154 of file itkGradientDescentOptimizer2.h.
DerivativeType itk::GradientDescentOptimizer2::m_SearchDirection {} [protected]
Definition at line 147 of file itkGradientDescentOptimizer2.h.
bool itk::GradientDescentOptimizer2::m_Stop { false } [private]
Definition at line 153 of file itkGradientDescentOptimizer2.h.
StopConditionType itk::GradientDescentOptimizer2::m_StopCondition { MaximumNumberOfIterations } [protected]
Definition at line 148 of file itkGradientDescentOptimizer2.h.
double itk::GradientDescentOptimizer2::m_Value { 0.0 } [private]
Definition at line 151 of file itkGradientDescentOptimizer2.h.