Fix one relu() to lrelu() for weighted-shortcut

pull/5172/head
AlexeyAB 5 years ago
parent 8cb1f0c44f
commit d6181c67cd
  1 changed file with 1 addition and 1 deletion
      src/blas_kernels.cu

@@ -1034,7 +1034,7 @@ __global__ void backward_shortcut_multilayer_kernel(int size, int src_outputs, i
 const int weights_index = src_i / step + (i + 1)*layer_step; // [0 or c or (c, h ,w)]
 float w = weights_gpu[weights_index];
-if (weights_normalizion == RELU_NORMALIZATION) w = relu(w) / sum;
+if (weights_normalizion == RELU_NORMALIZATION) w = lrelu(w) / sum;
 else if (weights_normalizion == SOFTMAX_NORMALIZATION) w = expf(w - max_val) / sum;
 if (weights_normalizion == RELU_NORMALIZATION) grad = w;
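
For context, a minimal sketch of what this normalization path appears to be doing and why the single relu()/lrelu() mismatch mattered: the per-layer shortcut weights are pushed through the same activation when building the normalizing sum and when normalizing each weight, and the commit title suggests the backward kernel still had one relu() where lrelu() is used elsewhere, so the gradient was computed against a different normalizer than the forward pass. The helper name lrelu_sketch, the eps floor, and the loop bounds below are assumptions for illustration, not the exact darknet implementation.

// Sketch only (CUDA): assumed behavior of the lrelu-based weight normalization.
__device__ static float lrelu_sketch(float x) {
    const float eps = 0.001f;          // assumed small floor instead of a hard zero
    return (x > eps) ? x : eps;
}

// Normalize one shortcut weight against the sum over all contributing layers.
// Forward and backward kernels must apply the same activation here; mixing
// relu() and lrelu() makes the backward gradient use a different sum.
__device__ float normalized_weight_sketch(const float *weights, int n, int idx) {
    float sum = 1e-9f;                 // epsilon to avoid division by zero
    for (int i = 0; i < n; ++i) sum += lrelu_sketch(weights[i]);
    return lrelu_sketch(weights[idx]) / sum;
}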
