12 #ifndef MLPACK_METHODS_ANN_LAYER_RECURRENT_ATTENTION_HPP
13 #define MLPACK_METHODS_ANN_LAYER_RECURRENT_ATTENTION_HPP
16 #include <boost/ptr_container/ptr_vector.hpp>
18 #include "../visitor/delta_visitor.hpp"
19 #include "../visitor/output_parameter_visitor.hpp"
20 #include "../visitor/reset_visitor.hpp"
21 #include "../visitor/weight_size_visitor.hpp"
52 typename InputDataType = arma::mat,
53 typename OutputDataType = arma::mat
55 class RecurrentAttention
72 template<
typename RNNModuleType,
typename ActionModuleType>
74 const RNNModuleType& rnn,
75 const ActionModuleType& action,
86 void Forward(arma::Mat<eT>&& input, arma::Mat<eT>&& output);
98 void Backward(
const arma::Mat<eT>&& ,
109 template<
typename eT>
115 std::vector<LayerTypes<>>&
Model() {
return network; }
123 OutputDataType
const&
Parameters()
const {
return parameters; }
138 OutputDataType
const&
Delta()
const {
return delta; }
140 OutputDataType&
Delta() {
return delta; }
143 OutputDataType
const&
Gradient()
const {
return gradient; }
150 template<
typename Archive>
151 void serialize(Archive& ar,
const unsigned int );
// Accumulate one step's worth of gradient into attentionGradient.
// NOTE(review): this extraction is missing several original source lines
// (no opening brace, and the branches between the statements below are
// incomplete) — confirm against the upstream mlpack sources before editing.
155 void IntermediateGradient()
// Reset the per-step scratch gradient before accumulating into it.
157 intermediateGradient.zeros();
// Special case for the last time step of the unrolled recurrence
// (backwardStep counts backward steps; rho is presumably the number of
// steps to backpropagate through — TODO confirm).
160 if (backwardStep == (rho - 1))
163 std::move(actionError)), actionModule);
// Compute the action module's gradient from its forward output and the
// action error.
167 boost::apply_visitor(GradientVisitor(std::move(boost::apply_visitor(
168 outputParameterVisitor, actionModule)), std::move(actionError)),
// Compute the RNN module's gradient from its forward output and the
// recurrent error.
173 boost::apply_visitor(GradientVisitor(std::move(boost::apply_visitor(
174 outputParameterVisitor, rnnModule)), std::move(recurrentError)),
// Fold this step's gradient into the running attention gradient.
177 attentionGradient += intermediateGradient;
//! The recurrent (RNN) module — presumably the `rnn` passed to the
//! constructor (TODO confirm).
184 LayerTypes<> rnnModule;
//! The action module — presumably the `action` passed to the constructor
//! (TODO confirm).
187 LayerTypes<> actionModule;
//! Stored parameters; exposed through Parameters().
202 OutputDataType parameters;
//! The modules making up the model; exposed through Model().
205 std::vector<LayerTypes<>> network;
//! Visitor used to query module weight sizes.
208 WeightSizeVisitor weightSizeVisitor;
//! Visitor used to access module deltas.
211 DeltaVisitor deltaVisitor;
//! Visitor used to access module forward outputs (see IntermediateGradient()).
214 OutputParameterVisitor outputParameterVisitor;
//! Stored feedback output parameters, one matrix per step.
217 std::vector<arma::mat> feedbackOutputParameter;
//! Stored module output parameters, one matrix per step.
220 std::vector<arma::mat> moduleOutputParameter;
//! The delta; exposed through Delta().
223 OutputDataType delta;
//! The gradient; exposed through Gradient().
226 OutputDataType gradient;
//! The input parameter.
229 InputDataType inputParameter;
//! The output parameter.
232 OutputDataType outputParameter;
//! Error fed to the RNN module's gradient in IntermediateGradient().
235 arma::mat recurrentError;
//! Error fed to the action module's gradient in IntermediateGradient().
238 arma::mat actionError;
//! Delta associated with the action module.
241 arma::mat actionDelta;
//! The initial input — presumably cached from the first forward step
//! (TODO confirm).
247 arma::mat initialInput;
//! Visitor used to reset the modules.
250 ResetVisitor resetVisitor;
//! Accumulated gradient; IntermediateGradient() adds one step at a time.
253 arma::mat attentionGradient;
//! Per-step scratch gradient; zeroed at the top of IntermediateGradient().
256 arma::mat intermediateGradient;
263 #include "recurrent_attention_impl.hpp"
bool & Deterministic()
Modify the value of the deterministic parameter.
bool Deterministic() const
The value of the deterministic parameter.
OutputDataType & Parameters()
Modify the parameters.
OutputDataType const & Delta() const
Get the delta.
void serialize(Archive &ar, const unsigned int)
Serialize the layer.
InputDataType & InputParameter()
Modify the input parameter.
The core includes that mlpack expects: standard C++ includes and Armadillo.
OutputDataType & Gradient()
Modify the gradient.
OutputDataType const & OutputParameter() const
Get the output parameter.
OutputDataType & OutputParameter()
Modify the output parameter.
RecurrentAttention()
Default constructor: this will not give a usable RecurrentAttention object, so be sure to set all the parameters before using it.
OutputDataType & Delta()
Modify the delta.
GradientVisitor executes the Gradient() method of the given module using the input and delta parameters.
OutputDataType const & Gradient() const
Get the gradient.
std::vector< LayerTypes<> > & Model()
Get the model modules.
void Backward(const arma::Mat< eT > &&, arma::Mat< eT > &&gy, arma::Mat< eT > &&g)
Ordinary feed backward pass of a neural network, calculating the function f(x) by propagating x backwards through f.
OutputDataType const & Parameters() const
Get the parameters.
void Forward(arma::Mat< eT > &&input, arma::Mat< eT > &&output)
Ordinary feed forward pass of a neural network, evaluating the function f(x) by propagating the activity forward through f.
InputDataType const & InputParameter() const
Get the input parameter.