[Bugfix][Kernel] Fix moe align block issue for mixtral (#12413)

ElizaWszola 2025-01-24 20:49:28 -05:00 committed by GitHub
parent 3132a933b6
commit 221d388cc5


@@ -33,7 +33,9 @@ __global__ void moe_align_block_size_kernel(scalar_t* __restrict__ topk_ids,
   extern __shared__ int32_t shared_mem[];
   int32_t* cumsum = shared_mem;  // 1d tensor with shape (num_experts + 1)
-  token_cnts_t* tokens_cnts = (token_cnts_t*)(shared_mem + blockDim.x + 1);
+  token_cnts_t* tokens_cnts =
+      (token_cnts_t*)(shared_mem + num_experts +
+                      1);  // 2d tensor with shape (blockDim.x + 1, num_experts)
   for (int i = 0; i < num_experts; ++i) {
     tokens_cnts[index(num_experts, threadIdx.x + 1, i)] = 0;
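
For context beyond the diff: the kernel carves a single dynamic shared-memory allocation into two logical arrays, cumsum followed by tokens_cnts. The pre-fix code started tokens_cnts at offset blockDim.x + 1, which lines up with the end of cumsum only when the block size happens to equal num_experts; for any other launch configuration the two arrays overlapped or tokens_cnts spilled past the allocation. A minimal standalone sketch of the intended layout follows (the carve_shared_mem helper and the token_cnts_t alias are illustrative assumptions, not code from vLLM; index mirrors the row-major 2d indexing the diff's loop implies):

#include <cstdint>

using token_cnts_t = int32_t;  // assumption: per-expert counts held as 32-bit ints

// Assumed row-major 2d indexing into a (rows, num_experts) table,
// matching the tokens_cnts[index(num_experts, row, col)] calls in the diff.
__device__ inline int index(int num_experts, int row, int col) {
  return row * num_experts + col;
}

// Hypothetical helper showing the shared-memory layout the fix restores:
//   shared_mem = [ cumsum      : (num_experts + 1) x int32_t
//                | tokens_cnts : (blockDim.x + 1) x num_experts x token_cnts_t ]
// tokens_cnts must begin num_experts + 1 slots in; offsetting it by
// blockDim.x + 1 (the old code) is correct only when blockDim.x == num_experts.
__device__ inline void carve_shared_mem(int32_t* shared_mem, int num_experts,
                                        int32_t** cumsum,
                                        token_cnts_t** tokens_cnts) {
  *cumsum = shared_mem;  // 1d, num_experts + 1 entries
  *tokens_cnts = (token_cnts_t*)(shared_mem + num_experts + 1);
}

A host-side launch matching this layout would reserve (num_experts + 1) * sizeof(int32_t) + (blockDim.x + 1) * num_experts * sizeof(token_cnts_t) bytes of dynamic shared memory; that sizing is an inference from the layout above, not quoted from the commit.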