Minor fix in batchnorm_layer (side effect in Python samples)

pull/4302/head
AlexeyAB 6 years ago
parent a4012895c1
commit f42923350b
 src/batchnorm_layer.c | 4 ++--
 src/parser.c          | 1 +

@@ -182,9 +182,9 @@ void forward_batchnorm_layer_gpu(layer l, network_state state)
     if (l.type == BATCHNORM) simple_copy_ongpu(l.outputs*l.batch, state.input, l.output_gpu);
     //copy_ongpu(l.outputs*l.batch, state.input, 1, l.output_gpu, 1);
 
-    simple_copy_ongpu(l.outputs*l.batch, l.output_gpu, l.x_gpu);
-    //copy_ongpu(l.outputs*l.batch, l.output_gpu, 1, l.x_gpu, 1);
     if (state.train) {
+        simple_copy_ongpu(l.outputs*l.batch, l.output_gpu, l.x_gpu);
+        //copy_ongpu(l.outputs*l.batch, l.output_gpu, 1, l.x_gpu, 1);
 #ifdef CUDNN
         float one = 1;
         float zero = 0;
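
Why this matters: l.x_gpu holds the pre-normalization activations that only the backward pass reads, so copying into it unconditionally meant inference-only calls (e.g. from the Python samples) silently overwrote that buffer as a side effect; after this change the copy happens only when state.train is set. A minimal CPU-side sketch of the resulting pattern, with plain memcpy standing in for simple_copy_ongpu and hypothetical buffer names:

#include <stdio.h>
#include <string.h>

/* Sketch only: x plays the role of l.x_gpu, output of l.output_gpu. */
static void forward_batchnorm_sketch(float *output, float *x, int n, int train)
{
    if (train) {
        /* Save the un-normalized activations for the backward pass. */
        memcpy(x, output, n * sizeof(float));
    }
    /* ... normalization of output would follow here ... */
}

int main(void)
{
    float output[4] = { 1, 2, 3, 4 };
    float x[4] = { 0 };
    forward_batchnorm_sketch(output, x, 4, 0); /* inference: x stays untouched */
    printf("x[0] after inference pass = %g\n", x[0]);
    return 0;
}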

@@ -978,6 +978,7 @@ network parse_network_cfg_custom(char *filename, int batch, int time_steps)
     params.batch = net.batch;
     params.time_steps = net.time_steps;
     params.net = net;
+    printf("batch = %d, time_steps = %d, train = %d \n", net.batch, net.time_steps, params.train);
     float bflops = 0;
     size_t workspace_size = 0;
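
For context, parse_network_cfg_custom builds a network from a .cfg file with an explicit batch and time_steps, so the added printf reports the values the loaded network will actually run with. A hedged usage sketch (the cfg path and argument values are hypothetical; the function signature comes from the hunk header above, and the build is assumed to have the darknet sources on the include path):

#include "darknet.h" /* network type */
#include "parser.h"  /* declares parse_network_cfg_custom in this repo */

int main(void)
{
    /* batch = 1, time_steps = 1 is a typical inference setup; with this
     * commit the load would print something like
     * "batch = 1, time_steps = 1, train = 0". */
    network net = parse_network_cfg_custom("cfg/yolov3.cfg", 1, 1);
    free_network(net);
    return 0;
}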
