From d6181c67cd72468a3b8c658586452a5c6fa8fcbf Mon Sep 17 00:00:00 2001
From: AlexeyAB
Date: Thu, 26 Mar 2020 15:28:20 +0300
Subject: [PATCH] Fix one relu() to lrelu() for weighted-shortcut

---
 src/blas_kernels.cu | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/blas_kernels.cu b/src/blas_kernels.cu
index e63d15a9..f6c5c67e 100644
--- a/src/blas_kernels.cu
+++ b/src/blas_kernels.cu
@@ -1034,7 +1034,7 @@ __global__ void backward_shortcut_multilayer_kernel(int size, int src_outputs, i
                 const int weights_index = src_i / step + (i + 1)*layer_step;  // [0 or c or (c, h ,w)]
                 float w = weights_gpu[weights_index];
-                if (weights_normalizion == RELU_NORMALIZATION) w = relu(w) / sum;
+                if (weights_normalizion == RELU_NORMALIZATION) w = lrelu(w) / sum;
                 else if (weights_normalizion == SOFTMAX_NORMALIZATION) w = expf(w - max_val) / sum;
 
                 if (weights_normalizion == RELU_NORMALIZATION) grad = w;
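
Note: the forward shortcut kernel normalizes per-layer mixing weights with lrelu() under RELU_NORMALIZATION, so this one remaining relu() call in backward_shortcut_multilayer_kernel created a forward/backward mismatch: a negative weight normalizes to a small positive value in the forward pass but to exactly zero in the backward pass, silently killing the gradient for that branch. Below is a minimal standalone sketch of the difference; the relu()/lrelu() helpers mirror the ones in src/blas_kernels.cu, but the exact eps floor of darknet's lrelu() is assumed here rather than quoted, and the main() driver is illustrative only.

    /* Sketch of the RELU_NORMALIZATION weight scaling before/after this patch.
     * relu()/lrelu() mirror the helpers in src/blas_kernels.cu; the eps = 0.001f
     * floor is an assumption about their exact definition. */
    #include <cstdio>

    __host__ __device__ static float relu(float x) {
        return x > 0.f ? x : 0.f;    /* hard zero for non-positive input */
    }

    __host__ __device__ static float lrelu(float x) {
        const float eps = 0.001f;    /* assumed floor; keeps w strictly positive */
        return x > eps ? x : eps;
    }

    int main() {
        /* A negative mixing weight, normalized by the running sum: */
        float w = -0.5f, sum = 2.0f;
        printf("relu(w)/sum  = %f  (pre-patch backward: gradient branch dies)\n",
               relu(w) / sum);
        printf("lrelu(w)/sum = %f  (matches what the forward pass computes)\n",
               lrelu(w) / sum);
        return 0;
    }

Compiled with, e.g., `nvcc sketch.cu -o sketch`, this prints 0.000000 for the relu() path and 0.000500 for the lrelu() path, which is the discrepancy the one-character-class change above removes.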