Fixed add_bias vs batch_norm on CPU

pull/1557/head
AlexeyAB 5 years ago
parent 81c5bcea92
commit f64746107d
Changed files:
  src/convolutional_layer.c (2 changes)
  src/parser.c (3 changes)

@@ -1249,7 +1249,9 @@ void forward_convolutional_layer(convolutional_layer l, network_state state)
     if(l.batch_normalize){
         forward_batchnorm_layer(l, state);
     }
+    else {
         add_bias(l.output, l.biases, l.batch, l.n, out_h*out_w);
+    }
     //activate_array(l.output, m*n*l.batch, l.activation);
     if (l.activation == SWISH) activate_array_swish(l.output, l.outputs*l.batch, l.activation_input, l.output);

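Why this fixes the CPU path: darknet's forward_batchnorm_layer already adds the per-channel bias at the end of normalization, so calling add_bias unconditionally afterwards applied the bias a second time to batch-normalized layers. With the new else branch, add_bias runs only for layers without batch normalization. For reference, here is a minimal sketch of what add_bias does, inferred from the call site (n is the number of filters, size is out_h*out_w); it mirrors darknet's implementation but is written here as an illustration, not copied from the repository:

#include <stdio.h>

/* Sketch: add biases[i] to every spatial element of feature map i,
   for each image in the batch. Layout assumed: output flattened as
   (b*n + i)*size + j. */
void add_bias(float *output, float *biases, int batch, int n, int size)
{
    int b, i, j;
    for (b = 0; b < batch; ++b)
        for (i = 0; i < n; ++i)
            for (j = 0; j < size; ++j)
                output[(b*n + i)*size + j] += biases[i];
}

int main(void)
{
    float out[12] = {0};              /* batch=2, n=2 filters, size=3 */
    float biases[2] = {0.5f, -1.0f};
    add_bias(out, biases, 2, 2, 3);
    printf("%.1f %.1f\n", out[0], out[3]);   /* prints: 0.5 -1.0 */
    return 0;
}
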
@@ -1864,10 +1864,11 @@ network *load_network_custom(char *cfg, char *weights, int clear, int batch)
 {
     printf(" Try to load cfg: %s, weights: %s, clear = %d \n", cfg, weights, clear);
     network* net = (network*)calloc(1, sizeof(network));
-    *net = parse_network_cfg_custom(cfg, batch, 0);
+    *net = parse_network_cfg_custom(cfg, batch, 1);
     if (weights && weights[0] != 0) {
         load_weights(net, weights);
     }
+    //fuse_conv_batchnorm(*net);
     if (clear) (*net->seen) = 0;
     return net;
 }
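
The loader side of the same fix: with the fuse_conv_batchnorm call left commented out, batch norm is no longer folded into the convolution weights at load time, so batch-normalized layers keep their parameters and take the forward_batchnorm_layer path above, while plain layers get their biases from the new add_bias branch. The commit also passes 1 instead of 0 as the last argument of parse_network_cfg_custom (time_steps, if darknet's current parser signature is a guide). A minimal usage sketch of load_network_custom as it stands after this commit, assuming darknet.h is on the include path; the cfg/weights file names are placeholders and error handling is elided:

#include "darknet.h"

int main(void)
{
    /* batch=1 builds the net for single-image inference;
       clear=0 keeps the weights file's 'seen' counter. */
    network *net = load_network_custom("yolov3.cfg", "yolov3.weights",
                                       /*clear*/ 0, /*batch*/ 1);
    if (!net) return 1;
    /* ... run inference with network_predict() etc. ... */
    free_network(*net);   /* darknet's free_network takes the struct by value */
    return 0;
}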
