Added URL to the description of activation MISH on GitHub

pull/4269/head
AlexeyAB 6 years ago
parent bf8ea4183d
commit c7c7078de7
2 changed files:
  src/activation_kernels.cu  (+2)
  src/activations.c  (+2)

src/activation_kernels.cu

@@ -199,6 +199,7 @@ __global__ void activate_array_swish_kernel(float *x, int n, float *output_sigmo
     }
 }
 
+// https://github.com/digantamisra98/Mish
 __global__ void activate_array_mish_kernel(float *x, int n, float *activation_input, float *output_gpu)
 {
     int i = (blockIdx.x + blockIdx.y*gridDim.x) * blockDim.x + threadIdx.x;
@@ -273,6 +274,7 @@ __global__ void gradient_array_swish_kernel(float *x, int n, float *sigmoid_gpu,
     }
 }
 
+// https://github.com/digantamisra98/Mish
 __global__ void gradient_array_mish_kernel(int n, float *activation_input, float *delta)
 {
     int i = (blockIdx.x + blockIdx.y*gridDim.x) * blockDim.x + threadIdx.x;
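
For reference, the Mish activation documented at the linked repository is f(x) = x * tanh(softplus(x)). The kernel below is a minimal, illustrative CUDA sketch of that forward pass, assuming the same buffer layout as the signatures above (input x, activation_input cached for the backward pass, output_gpu written out); the name mish_forward_sketch is hypothetical and this is not the committed darknet kernel.

// Minimal sketch of a Mish forward kernel, f(x) = x * tanh(ln(1 + e^x)),
// following https://github.com/digantamisra98/Mish. Illustrative only;
// the softplus here is numerically naive (expf can overflow for large x).
__global__ void mish_forward_sketch(const float *x, int n, float *activation_input, float *output_gpu)
{
    int i = (blockIdx.x + blockIdx.y*gridDim.x) * blockDim.x + threadIdx.x;
    if (i < n) {
        float v = x[i];
        activation_input[i] = v;              // cache the raw input for the gradient pass
        float sp = logf(1.0f + expf(v));      // softplus(v)
        output_gpu[i] = v * tanhf(sp);        // mish(v)
    }
}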

src/activations.c

@@ -134,6 +134,7 @@ void activate_array_swish(float *x, const int n, float * output_sigmoid, float *
     }
 }
 
+// https://github.com/digantamisra98/Mish
 void activate_array_mish(float *x, const int n, float * activation_input, float * output)
 {
     int i;
@@ -200,6 +201,7 @@ void gradient_array_swish(const float *x, const int n, const float * sigmoid, fl
     }
 }
 
+// https://github.com/digantamisra98/Mish
 void gradient_array_mish(const int n, const float * activation_input, float * delta)
 {
     int i;
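
The CPU path pairs the same forward function with its gradient. By the product and chain rules, f'(x) = tanh(sp(x)) + x * sigmoid(x) * (1 - tanh^2(sp(x))), since d/dx softplus(x) = sigmoid(x). The C sketch below is a self-contained illustration of both formulas under the definition from the linked repository; the helper names are hypothetical and it is not the darknet implementation.

#include <math.h>
#include <stdio.h>

/* Minimal CPU sketch of Mish and its derivative, following
   https://github.com/digantamisra98/Mish. Illustrative only, not the
   darknet implementation; softplus is left numerically naive here. */
static float softplus(float x) { return logf(1.0f + expf(x)); }

static float mish(float x) { return x * tanhf(softplus(x)); }

/* f'(x) = tanh(sp(x)) + x * sigmoid(x) * (1 - tanh^2(sp(x))) */
static float mish_grad(float x)
{
    float t = tanhf(softplus(x));
    float sig = 1.0f / (1.0f + expf(-x));
    return t + x * sig * (1.0f - t * t);
}

int main(void)
{
    for (int k = -2; k <= 2; ++k)
        printf("x=%d  mish=%.5f  mish'=%.5f\n", k, mish((float)k), mish_grad((float)k));
    return 0;
}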
