Commit dec415e: bug-fix
js1010 committed Feb 10, 2021
1 parent 4a542ab commit dec415e
Showing 2 changed files with 4 additions and 5 deletions.
cpp/include/cuda_build_kernels.cuh (5 changes: 2 additions & 3 deletions)
@@ -74,8 +74,7 @@ void SearchHeuristic(
     const bool save_remains,
     int* cand_nodes, cuda_scalar* cand_distances,
     int* graph, float* distances, int* deg,
-    const float heuristic_coef, int new_comer = -1) {
-  if (save_remains) new_comer = -1;
+    const float heuristic_coef, const int new_comer = -1) {
   int size2 = *size;
   __syncthreads();

@@ -317,7 +316,7 @@ __global__ void BuildLevelGraphKernel(
   }

   __syncthreads();
-  int new_comer = went_through_heuristic[dstid]? srcid: -1;
+  const int new_comer = not save_remains and went_through_heuristic[dstid]? srcid: -1;
   __syncthreads();
   SearchHeuristic(ef_const_pq, &size, dstid, nodes,
                   data, dist_type, num_dims,
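Taken together, the two hunks above hoist the save_remains guard out of SearchHeuristic and into its call site, which is what lets new_comer become a const parameter. A minimal standalone sketch of why the refactor preserves behavior (illustrative names, not the project's code):

    // Before: the call site picked a candidate and the callee then reset it.
    __device__ int new_comer_before(bool save_remains, bool went_heuristic, int srcid) {
      int new_comer = went_heuristic ? srcid : -1;  // old call-site expression
      if (save_remains) new_comer = -1;             // old reset inside SearchHeuristic
      return new_comer;
    }

    // After: the caller folds the guard into a single const initializer.
    __device__ int new_comer_after(bool save_remains, bool went_heuristic, int srcid) {
      return (!save_remains && went_heuristic) ? srcid : -1;
    }

Both versions yield -1 whenever save_remains is true and agree otherwise, so SearchHeuristic observes the same new_comer value as before the refactor.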
cpp/include/cuda_dist_kernels.cuh (4 changes: 2 additions & 2 deletions)
@@ -40,7 +40,7 @@ cuda_scalar dot(const cuda_scalar * a, const cuda_scalar * b, const int num_dims
   // partially reduce the dot product inside each warp using a shuffle
   cuda_scalar val = 0;
   for (int i = threadIdx.x; i < num_dims; i += blockDim.x)
-    val += mul(a[i], b[i]);
+    val = add(val, mul(a[i], b[i]));
   val = warp_reduce_sum(val);

   // write out the partial reduction to shared memory if appropriate
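Here the built-in += on cuda_scalar is replaced by the add() helper already used alongside mul() and sub(). A plausible motivation, offered as an assumption rather than anything stated in the commit: cuda_scalar may be compiled as __half, for which plain operator arithmetic is not available on every build, so all arithmetic is routed through overloaded shims along these lines:

    #include <cuda_fp16.h>

    // Hedged sketch of add/mul overloads consistent with the calls in the diff;
    // the __half versions are an assumption about the half-precision build.
    __inline__ __device__ float  add(float a, float b)   { return a + b; }
    __inline__ __device__ float  mul(float a, float b)   { return a * b; }
    __inline__ __device__ __half add(__half a, __half b) { return __hadd(a, b); }
    __inline__ __device__ __half mul(__half a, __half b) { return __hmul(a, b); }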
@@ -80,7 +80,7 @@ cuda_scalar squaresum(const cuda_scalar * a, const cuda_scalar * b, const int num_dims
   cuda_scalar val = 0;
   for (int i = threadIdx.x; i < num_dims; i += blockDim.x) {
     cuda_scalar _val = sub(a[i], b[i]);
-    val = mul(_val, _val);
+    val = add(val, mul(_val, _val));
   }
   __syncthreads();
   val = warp_reduce_sum(val);
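This hunk is the substantive bug fix of the commit: the removed line overwrote val on every loop iteration, so each thread kept only the squared difference of the last dimension it visited, and squaresum under-counted whenever a thread processed more than one dimension (num_dims > blockDim.x). The added line accumulates instead. A minimal float-only sketch of the corrected per-thread loop (the function name is illustrative; the diff's helpers are inlined as plain operators):

    __device__ float squaresum_partial(const float* a, const float* b,
                                       const int num_dims) {
      float val = 0.0f;
      for (int i = threadIdx.x; i < num_dims; i += blockDim.x) {
        float d = a[i] - b[i];  // sub(a[i], b[i]) in the diff's notation
        val += d * d;           // the fix: accumulate rather than overwrite
      }
      return val;  // per-thread partial, combined afterwards by warp_reduce_sum
    }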
