12 #ifndef MLPACK_METHODS_ANN_LAYER_RECURRENT_ATTENTION_HPP
13 #define MLPACK_METHODS_ANN_LAYER_RECURRENT_ATTENTION_HPP
16 #include <boost/ptr_container/ptr_vector.hpp>
18 #include "../visitor/delta_visitor.hpp"
19 #include "../visitor/output_parameter_visitor.hpp"
20 #include "../visitor/reset_visitor.hpp"
21 #include "../visitor/weight_size_visitor.hpp"
52 typename InputDataType = arma::mat,
53 typename OutputDataType = arma::mat
55 class RecurrentAttention
72 template<
typename RNNModuleType,
typename ActionModuleType>
74 const RNNModuleType& rnn,
75 const ActionModuleType& action,
86 void Forward(arma::Mat<eT>&& input, arma::Mat<eT>&& output);
98 void Backward(
const arma::Mat<eT>&& ,
109 template<
typename eT>
115 std::vector<LayerTypes<>>&
Model() {
return network; }
123 OutputDataType
const&
Parameters()
const {
return parameters; }
133 OutputDataType
const&
Delta()
const {
return delta; }
135 OutputDataType&
Delta() {
return delta; }
138 OutputDataType
const&
Gradient()
const {
return gradient; }
/**
 * Serialize the layer.
 */
template<typename Archive>
void serialize(Archive& ar, const unsigned int /* version */);
// Calculate the gradient of the attention module for one backward step and
// accumulate it into the overall attention gradient.
// NOTE(review): this extract is missing original source lines between the
// fragments below (the embedded numbering jumps 150 -> 152 -> 155 -> 158 ...),
// so the body is incomplete as shown — reconstruct from the full header
// before relying on it.
150 void IntermediateGradient()
// Reset the per-step gradient accumulator before computing this step.
152 intermediateGradient.zeros();
// At the last step of backpropagation-through-time, presumably the saved
// initial input is used instead of a stored output — TODO confirm against
// the full source.
155 if (backwardStep == (rho - 1))
158 std::move(actionError)), actionModule);
// Gradient of the action module, evaluated at its stored output parameter
// with the action error as the delta.
162 boost::apply_visitor(GradientVisitor(std::move(boost::apply_visitor(
163 outputParameterVisitor, actionModule)), std::move(actionError)),
// Gradient of the RNN module, evaluated at its stored output parameter
// with the recurrent error as the delta.
168 boost::apply_visitor(GradientVisitor(std::move(boost::apply_visitor(
169 outputParameterVisitor, rnnModule)), std::move(recurrentError)),
// Accumulate this step's gradient into the attention gradient.
172 attentionGradient += intermediateGradient;
//! Locally-stored RNN module of the attention model.
179 LayerTypes<> rnnModule;
//! Locally-stored action module of the attention model.
182 LayerTypes<> actionModule;
//! Locally-stored model parameters.
197 OutputDataType parameters;
//! Locally-stored network modules; exposed to callers via Model().
200 std::vector<LayerTypes<>> network;
//! Visitor used to query a module's weight size.
203 WeightSizeVisitor weightSizeVisitor;
//! Visitor used to access a module's delta.
206 DeltaVisitor deltaVisitor;
//! Visitor used to access a module's output parameter.
209 OutputParameterVisitor outputParameterVisitor;
//! Stored output parameters fed back between steps — presumably one entry
//! per time step; confirm against the full source.
212 std::vector<arma::mat> feedbackOutputParameter;
//! Locally-stored module output parameters.
215 std::vector<arma::mat> moduleOutputParameter;
//! Locally-stored delta object.
218 OutputDataType delta;
//! Locally-stored gradient object.
221 OutputDataType gradient;
//! Locally-stored output parameter object.
224 OutputDataType outputParameter;
//! Error propagated back into the RNN module.
227 arma::mat recurrentError;
//! Error propagated back into the action module.
230 arma::mat actionError;
//! Delta of the action module.
233 arma::mat actionDelta;
//! Copy of the initial input — presumably reused at the final backward
//! step (see IntermediateGradient); confirm against the full source.
239 arma::mat initialInput;
//! Visitor used to reset a module's state.
242 ResetVisitor resetVisitor;
//! Accumulated gradient of the attention model (IntermediateGradient adds
//! intermediateGradient into this each step).
245 arma::mat attentionGradient;
//! Per-step gradient, zeroed and recomputed by IntermediateGradient.
248 arma::mat intermediateGradient;
255 #include "recurrent_attention_impl.hpp"
bool & Deterministic()
Modify the value of the deterministic parameter.
bool Deterministic() const
The value of the deterministic parameter.
OutputDataType & Parameters()
Modify the parameters.
OutputDataType const & Delta() const
Get the delta.
void serialize(Archive &ar, const unsigned int)
Serialize the layer.
The core includes that mlpack expects: standard C++ includes and Armadillo.
OutputDataType & Gradient()
Modify the gradient.
OutputDataType const & OutputParameter() const
Get the output parameter.
OutputDataType & OutputParameter()
Modify the output parameter.
RecurrentAttention()
Default constructor: this will not give a usable RecurrentAttention object, so be sure to set all the parameters before use.
OutputDataType & Delta()
Modify the delta.
GradientVisitor executes the Gradient() method of the given module using the input and delta parameters.
OutputDataType const & Gradient() const
Get the gradient.
std::vector< LayerTypes<> > & Model()
Get the model modules.
void Backward(const arma::Mat< eT > &&, arma::Mat< eT > &&gy, arma::Mat< eT > &&g)
Ordinary feed backward pass of a neural network, calculating the function f(x) by propagating x backwards through f, using the results of the feed forward pass.
OutputDataType const & Parameters() const
Get the parameters.
void Forward(arma::Mat< eT > &&input, arma::Mat< eT > &&output)
Ordinary feed forward pass of a neural network, evaluating the function f(x) by propagating the activity forward through f.