Revert to old Assisted Excitation version.

pull/4302/head
AlexeyAB 6 years ago
parent 5bbbbd7c53
commit 9fe201807e
  1. 8
      src/convolutional_kernels.cu
  2. 10
      src/parser.c
  3. 2
      src/route_layer.c

@@ -1019,8 +1019,8 @@ void assisted_excitation_forward_gpu(convolutional_layer l, network_state state)
for (t = 0; t < state.net.num_boxes; ++t) {
box truth = float_to_box_stride(truth_cpu + t*(4 + 1) + b*l.truths, 1);
if (!truth.x) break; // continue;
//float beta = 0;
float beta = 1 - alpha; // from 0 to 1
float beta = 0;
//float beta = 1 - alpha; // from 0 to 1
float dw = (1 - truth.w) * beta;
float dh = (1 - truth.h) * beta;
//printf(" alpha = %f, beta = %f, truth.w = %f, dw = %f, tw+dw = %f, l.out_w = %d \n", alpha, beta, truth.w, dw, truth.w+dw, l.out_w);
@@ -1052,9 +1052,9 @@ void assisted_excitation_forward_gpu(convolutional_layer l, network_state state)
//CHECK_CUDA(cudaPeekAtLastError());
// calc new output
assisted_activation2_gpu(1, l.output_gpu, l.gt_gpu, l.a_avg_gpu, l.out_w * l.out_h, l.out_c, l.batch); // AE3: gt increases (beta = 1 - alpha = 0)
//assisted_activation2_gpu(1, l.output_gpu, l.gt_gpu, l.a_avg_gpu, l.out_w * l.out_h, l.out_c, l.batch); // AE3: gt increases (beta = 1 - alpha = 0)
//assisted_activation2_gpu(alpha, l.output_gpu, l.gt_gpu, l.a_avg_gpu, l.out_w * l.out_h, l.out_c, l.batch);
//assisted_activation_gpu(alpha, l.output_gpu, l.gt_gpu, l.a_avg_gpu, l.out_w * l.out_h, l.out_c, l.batch);
assisted_activation_gpu(alpha, l.output_gpu, l.gt_gpu, l.a_avg_gpu, l.out_w * l.out_h, l.out_c, l.batch);
//cudaStreamSynchronize(get_cuda_stream());
//CHECK_CUDA(cudaPeekAtLastError());

@@ -797,6 +797,16 @@ route_layer parse_route(list *options, size_params params)
}
layer.out_c = layer.out_c / layer.groups;
layer.w = first.w;
layer.h = first.h;
layer.c = layer.out_c;
if (n > 3) fprintf(stderr, " \t ");
else if (n > 1) fprintf(stderr, " \t ");
else fprintf(stderr, " \t\t ");
fprintf(stderr, " -> %4d x%4d x%4d \n", layer.w, layer.h, layer.c, layer.out_w, layer.out_h, layer.out_c);
return layer;
}

@@ -20,10 +20,10 @@ route_layer make_route_layer(int batch, int n, int *input_layers, int *input_sizes, int groups, int group_id)
fprintf(stderr," %d", input_layers[i]);
outputs += input_sizes[i];
}
fprintf(stderr, "\n");
outputs = outputs / groups;
l.outputs = outputs;
l.inputs = outputs;
//fprintf(stderr, " inputs = %d \t outputs = %d, groups = %d, group_id = %d \n", l.inputs, l.outputs, l.groups, l.group_id);
l.delta = (float*)calloc(outputs * batch, sizeof(float));
l.output = (float*)calloc(outputs * batch, sizeof(float));

Loading…
Cancel
Save