Adam is an optimizer that computes individual adaptive learning rates for different parameters from estimates of first and second moments of the gradients.

The relevant parts of the class interface, from mlpack/core/optimizers/adam/adam.hpp (elided sections are marked with // ...):

#ifndef __MLPACK_CORE_OPTIMIZERS_ADAM_ADAM_HPP
#define __MLPACK_CORE_OPTIMIZERS_ADAM_ADAM_HPP

// The core includes that mlpack expects: standard C++ includes and Armadillo.
#include <mlpack/core.hpp>

namespace mlpack {
namespace optimization {

template<typename DecomposableFunctionType>
class Adam
{
 public:
  //! Construct the Adam optimizer with the given function and parameters.
  Adam(DecomposableFunctionType& function,
       const double stepSize = 0.001,
       const double beta1 = 0.9,
       const double beta2 = 0.999,
       const double eps = 1e-8,
       const size_t maxIterations = 100000,
       const double tolerance = 1e-5,
       const bool shuffle = true);

  //! Optimize the given function using Adam.
  double Optimize(arma::mat& iterate);

  //! Get the instantiated function to be optimized.
  const DecomposableFunctionType& Function() const { return function; }
  //! Modify the instantiated function.
  DecomposableFunctionType& Function() { return function; }

  // ...

  //! Get the smoothing parameter.
  double Beta1() const { return beta1; }

  // ...

  //! Get the second moment coefficient.
  double Beta2() const { return beta2; }

  // ...

 private:
  //! The instantiated function.
  DecomposableFunctionType& function;

  // ...

  //! The maximum number of allowed iterations (0 indicates no limit).
  size_t maxIterations;

  // ...
};

} // namespace optimization
} // namespace mlpack

// Include implementation.
#include "adam_impl.hpp"

#endif
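The DecomposableFunctionType template parameter must expose the decomposable function API that mlpack's SGD-style optimizers expect: a NumFunctions() count of separable terms, plus per-term Evaluate() and Gradient() methods. The sketch below shows one such type; SquaredErrorFunction is a hypothetical illustration written for this page, not a class from mlpack, and it implements the separable objective sum_i (x_i' * theta - y_i)^2 with one term per data point.

#include <mlpack/core.hpp>

// Hypothetical example type (not part of mlpack): the separable objective
// sum_i (x_i' * theta - y_i)^2, one term per data point.
class SquaredErrorFunction
{
 public:
  SquaredErrorFunction(const arma::mat& data, const arma::rowvec& responses) :
      data(data), responses(responses) { }

  // Number of separable terms: one per column (data point).
  size_t NumFunctions() const { return data.n_cols; }

  // Evaluate the i-th term at the given coordinates.
  double Evaluate(const arma::mat& coordinates, const size_t i) const
  {
    const double error = arma::dot(coordinates, data.col(i)) - responses[i];
    return error * error;
  }

  // Store the gradient of the i-th term in 'gradient'.
  void Gradient(const arma::mat& coordinates,
                const size_t i,
                arma::mat& gradient) const
  {
    const double error = arma::dot(coordinates, data.col(i)) - responses[i];
    gradient = 2.0 * error * data.col(i);
  }

 private:
  const arma::mat& data;
  const arma::rowvec& responses;
};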
Adam(DecomposableFunctionType &function, const double stepSize=0.001, const double beta1=0.9, const double beta2=0.999, const double eps=1e-8, const size_t maxIterations=100000, const double tolerance=1e-5, const bool shuffle=true)
Construct the Adam optimizer with the given function and parameters.

double Optimize(arma::mat &iterate)
Optimize the given function using Adam.
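A minimal usage sketch of the constructor and Optimize(), assuming the hypothetical SquaredErrorFunction type defined above:

using namespace mlpack::optimization;

arma::mat data = arma::randu<arma::mat>(5, 100);         // 5 dimensions, 100 points.
arma::rowvec responses = arma::randu<arma::rowvec>(100);
SquaredErrorFunction f(data, responses);

// Defaults shown explicitly; Adam<SquaredErrorFunction> optimizer(f) works too.
Adam<SquaredErrorFunction> optimizer(f, 0.001, 0.9, 0.999, 1e-8, 100000, 1e-5,
    true);

// Hyperparameters can also be adjusted after construction via the modify
// accessors documented below.
optimizer.StepSize() = 0.005;
optimizer.MaxIterations() = 50000;

arma::mat coordinates = arma::zeros<arma::mat>(5, 1);    // Starting iterate.
const double objective = optimizer.Optimize(coordinates);
// 'coordinates' now holds the best parameters found; 'objective' is the
// final objective value.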
const DecomposableFunctionType & Function() const
Get the instantiated function to be optimized.

DecomposableFunctionType & Function()
Modify the instantiated function.

double StepSize() const
Get the step size.

double & StepSize()
Modify the step size.

double Beta1() const
Get the smoothing parameter.

double & Beta1()
Modify the smoothing parameter.

double Beta2() const
Get the second moment coefficient.

double & Beta2()
Modify the second moment coefficient.

double Epsilon() const
Get the value used to initialise the mean squared gradient parameter.

double & Epsilon()
Modify the value used to initialise the mean squared gradient parameter.

size_t MaxIterations() const
Get the maximum number of iterations (0 indicates no limit).

size_t & MaxIterations()
Modify the maximum number of iterations (0 indicates no limit).
double Tolerance() const
Get the tolerance for termination.

double & Tolerance()
Modify the tolerance for termination.

bool Shuffle() const
Get whether or not the individual functions are shuffled.

bool & Shuffle()
Modify whether or not the individual functions are shuffled.
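For reference, the moment estimates mentioned in the class description follow the standard Adam update rule from Kingma and Ba's paper; this is a reference sketch rather than text from this header, with alpha = stepSize, beta_1 = beta1, beta_2 = beta2, epsilon = eps, g_t the gradient of the current term, and theta the iterate:

\begin{aligned}
m_t &= \beta_1 m_{t-1} + (1 - \beta_1) g_t \\
v_t &= \beta_2 v_{t-1} + (1 - \beta_2) g_t^2 \\
\hat{m}_t &= m_t / (1 - \beta_1^t), \qquad \hat{v}_t = v_t / (1 - \beta_2^t) \\
\theta_t &= \theta_{t-1} - \alpha\, \hat{m}_t / (\sqrt{\hat{v}_t} + \epsilon)
\end{aligned}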