/usr/include/ITK-4.10/itkRegularStepGradientDescentOptimizerv4.h is in libinsighttoolkit4-dev 4.10.1-dfsg1-1.1+b1.
This file is owned by root:root, with mode 0o644.
The actual contents of the file can be viewed below.
/*=========================================================================
*
* Copyright Insight Software Consortium
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0.txt
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*=========================================================================*/
#ifndef itkRegularStepGradientDescentOptimizerv4_h
#define itkRegularStepGradientDescentOptimizerv4_h
#include "itkGradientDescentOptimizerv4.h"
#include <itkCompensatedSummation.h>
namespace itk
{
/** \class RegularStepGradientDescentOptimizerv4
* \brief Regular Step Gradient descent optimizer.
*
* This optimizer is a variant of gradient descent that attempts to prevent it
* from taking steps that are too large. At each iteration, this optimizer
* will take a step along the direction of the metric derivative. Each time the
* direction of the derivative abruptly changes, the optimizer assumes that a
* local extremum has been passed and reacts by reducing the step length by a
* relaxation factor that is set to 0.5 by default.
* The default value for the initial step length is 1, and this value can only
* be changed manually via SetLearningRate() since this optimizer does not use
* the ScaleEstimator to automatically estimate the learning rate.
* Also note that, unlike the previous version of RegularStepGradientDescentOptimizer,
* the ITKv4 version does not have a "maximize/minimize" option to modify the effect of
* the metric derivative. The assigned metric is assumed to return a parameter
* derivative result that "improves" the optimization.
*
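* A minimal usage sketch (the metric object and all parameter values below
* are hypothetical, for illustration only):
*
* \code
* typedef itk::RegularStepGradientDescentOptimizerv4<double> OptimizerType;
* OptimizerType::Pointer optimizer = OptimizerType::New();
* optimizer->SetMetric( metric );            // some ObjectToObjectMetric instance
* optimizer->SetLearningRate( 1.0 );         // initial step length
* optimizer->SetMinimumStepLength( 0.001 );  // stop once steps get this small
* optimizer->SetRelaxationFactor( 0.5 );     // shrink factor on direction change
* optimizer->SetNumberOfIterations( 200 );
* optimizer->StartOptimization();
* \endcode
*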
* \ingroup ITKOptimizersv4
*/
template<typename TInternalComputationValueType>
class RegularStepGradientDescentOptimizerv4
: public GradientDescentOptimizerv4Template<TInternalComputationValueType>
{
public:
/** Standard class typedefs. */
typedef RegularStepGradientDescentOptimizerv4 Self;
typedef GradientDescentOptimizerv4Template<TInternalComputationValueType> Superclass;
typedef SmartPointer< Self > Pointer;
typedef SmartPointer< const Self > ConstPointer;
/** Run-time type information (and related methods). */
itkTypeMacro(RegularStepGradientDescentOptimizerv4, Superclass);
/** New macro for creation through a SmartPointer */
itkNewMacro(Self);
/** It should be possible to derive the internal computation type from the class object. */
typedef TInternalComputationValueType InternalComputationValueType;
/** Derivative type */
typedef typename Superclass::DerivativeType DerivativeType;
/** Metric type over which this class is templated */
typedef typename Superclass::MeasureType MeasureType;
typedef typename Superclass::IndexRangeType IndexRangeType;
typedef typename Superclass::ScalesType ScalesType;
typedef typename Superclass::ParametersType ParametersType;
typedef typename Superclass::StopConditionType StopConditionType;
/** Compensated summation type */
typedef CompensatedSummation< InternalComputationValueType > CompensatedSummationType;
/** Minimum step length (learning rate) value for convergence checking.
* The step length is decreased by the relaxation factor whenever a step is
* too long and the algorithm passes over the local minimum.
* When the step length falls below this value, the optimization is treated
* as converged.
*
* The default m_MinimumStepLength is 1e-4, a value chosen so that all of
* the regression tests pass.
*/
itkSetMacro(MinimumStepLength, TInternalComputationValueType);
itkGetConstReferenceMacro(MinimumStepLength, TInternalComputationValueType);
/** Set/Get relaxation factor value */
itkSetMacro(RelaxationFactor, TInternalComputationValueType);
itkGetConstReferenceMacro(RelaxationFactor, TInternalComputationValueType);
/** Set/Get gradient magnitude tolerance value */
itkSetMacro(GradientMagnitudeTolerance, TInternalComputationValueType);
itkGetConstReferenceMacro(GradientMagnitudeTolerance, TInternalComputationValueType);
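/* A worked illustration (assumed values, not part of the API): each time the
* derivative direction reverses, the current step length is multiplied by the
* relaxation factor, i.e. step_{k+1} = RelaxationFactor * step_k. Starting
* from a step length of 1.0 with a RelaxationFactor of 0.5, ten reversals
* give 0.5^10 ~= 0.00098, which falls below a MinimumStepLength of 1e-3 and
* ends the optimization. */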
/** Start and run the optimization */
virtual void StartOptimization( bool doOnlyInitialization = false ) ITK_OVERRIDE;
/** Estimate the learning rate based on the current gradient. */
virtual void EstimateLearningRate() ITK_OVERRIDE;
/** Get current gradient step value */
double GetCurrentStepLength() const;
protected:
/** Advance one step following the gradient direction.
* Includes transform update. */
virtual void AdvanceOneStep(void) ITK_OVERRIDE;
/** Modify the input gradient over a given index range. */
virtual void ModifyGradientByScalesOverSubRange( const IndexRangeType& subrange ) ITK_OVERRIDE;
virtual void ModifyGradientByLearningRateOverSubRange( const IndexRangeType& subrange ) ITK_OVERRIDE;
/**
* When the local minimum is passed by taking a large step,
* the step size is adjusted by the relaxation factor, so we
* can take smaller steps toward the minimum point.
*/
TInternalComputationValueType m_RelaxationFactor;
/** Default constructor */
RegularStepGradientDescentOptimizerv4();
/** Destructor */
virtual ~RegularStepGradientDescentOptimizerv4();
virtual void PrintSelf( std::ostream & os, Indent indent ) const ITK_OVERRIDE;
/** Minimum gradient step value for convergence checking */
TInternalComputationValueType m_MinimumStepLength;
/** Minimum gradient magnitude value for convergence checking */
TInternalComputationValueType m_GradientMagnitudeTolerance;
/** Current scale for learning rate */
MeasureType m_CurrentLearningRateRelaxation;
private:
RegularStepGradientDescentOptimizerv4( const Self & ) ITK_DELETE_FUNCTION;
void operator=( const Self& ) ITK_DELETE_FUNCTION;
};
} // end namespace itk
#ifndef ITK_MANUAL_INSTANTIATION
#include "itkRegularStepGradientDescentOptimizerv4.hxx"
#endif
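/* A usage sketch of the manual-instantiation idiom above (hypothetical
* translation unit, for illustration only): a project may define
* ITK_MANUAL_INSTANTIATION so that the .hxx body is not included here, and
* then instantiate the template once itself:
*
* #include "itkRegularStepGradientDescentOptimizerv4.hxx"
* template class itk::RegularStepGradientDescentOptimizerv4<double>;
*/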
#endif