CUV 0.9.201304091348

function optimization

Functions
template<class V, class M, class L>
void softmax (cuv::tensor< V, M, L > &dst, const cuv::tensor< V, M, L > &src, unsigned int vardim=1)
    calculate softmax.
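
For reference, this is the standard softmax that the routine presumably computes along the axis selected by vardim (the axis convention is an assumption; the page does not spell it out):

    \mathrm{softmax}(x)_i = \frac{\exp(x_i)}{\sum_j \exp(x_j)}
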
template<class V, class M, class L>
void softmax_derivative (cuv::tensor< V, M, L > &dst, const cuv::tensor< V, M, L > &softmax_act, const cuv::tensor< V, M, L > &residual, unsigned int vardim=1)
    calculate derivative of softmax.
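
A hedged sketch of the backward pass this likely implements: writing s for softmax_act and r for residual, the textbook softmax Jacobian-vector product is

    \mathrm{dst}_i = s_i \left( r_i - \sum_j r_j s_j \right)

with the sum running over the axis selected by vardim. This is the standard formula, not taken from this page; whether dst is overwritten or accumulated into is not stated here.
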
template<class V, class M, class L>
void adagrad (tensor< V, M, L > &W, const tensor< V, M, L > &dW, tensor< V, M, L > &sW, const float &learnrate, const float &delta, const float &decay=0.0f, const float &sparsedecay=0.0f)
    Do a gradient update step using AdaGrad.
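
A minimal usage sketch, assuming the conventions of the surrounding CUV API: the adagrad signature is taken from the listing above, but the header name, the host_memory_space tag, the extents-based constructor, and the scalar fills are assumptions and may differ in your CUV version.

    // Hedged sketch: only the adagrad(...) call reflects this page;
    // everything else is an assumption about the CUV API.
    #include <cuv.hpp>
    using namespace cuv;

    int main(){
        tensor<float, host_memory_space> W (extents[64][32]); // parameters
        tensor<float, host_memory_space> dW(extents[64][32]); // gradient of the loss w.r.t. W
        tensor<float, host_memory_space> sW(extents[64][32]); // running sum of squared gradients
        W = 0.1f; dW = 0.01f; sW = 0.0f;                      // illustrative fills

        // one AdaGrad step: learnrate 0.01, stability term delta 1e-6,
        // no L2 decay, no sparse decay
        adagrad(W, dW, sW, 0.01f, 1e-6f, 0.0f, 0.0f);
        return 0;
    }

AdaGrad accumulates squared gradients in sW, so each weight's effective learning rate shrinks over time in proportion to how large its past gradients have been.
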
template<class V, class M, class L>
void rmsprop (tensor< V, M, L > &W, const tensor< V, M, L > &dW, tensor< V, M, L > &sW, const float &learnrate, const float &delta, const float &decay=0.0f, const float &sparsedecay=0.0f, const float &grad_avg=0.9f)
    Do a gradient update step using RMSProp.
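
The parameter list matches the standard RMSProp recipe; assuming sW holds an exponential moving average of squared gradients with smoothing factor grad_avg, the step is presumably

    sW \leftarrow \text{grad\_avg} \cdot sW + (1 - \text{grad\_avg}) \cdot dW^2
    W  \leftarrow W - \text{learnrate} \cdot \frac{dW}{\sqrt{sW} + \delta}

with delta guarding against division by zero. Whether delta sits inside or outside the square root, and how decay and sparsedecay enter the update, is not specified on this page.
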