From dac6a3f6ed14ea4061b672f9290bfdf8bcdd996d Mon Sep 17 00:00:00 2001
From: Steve Grubb
Date: Fri, 10 May 2024 09:37:05 -0400
Subject: [PATCH] [Misc] Apply a couple g++ cleanups (#4719)

---
 csrc/cpu/cache.cpp        | 2 +-
 csrc/cpu/pos_encoding.cpp | 1 -
 2 files changed, 1 insertion(+), 2 deletions(-)

diff --git a/csrc/cpu/cache.cpp b/csrc/cpu/cache.cpp
index 620d11ef1ed6..26e81685d623 100644
--- a/csrc/cpu/cache.cpp
+++ b/csrc/cpu/cache.cpp
@@ -84,7 +84,7 @@ void reshape_and_cache_cpu_impl(
 void copy_blocks(std::vector<torch::Tensor> &key_caches,
                  std::vector<torch::Tensor> &value_caches,
                  const torch::Tensor& block_mapping) {
-  int num_layers = key_caches.size();
+  unsigned num_layers = key_caches.size();
   TORCH_CHECK(num_layers == value_caches.size());
   if (num_layers == 0) {
     return;
diff --git a/csrc/cpu/pos_encoding.cpp b/csrc/cpu/pos_encoding.cpp
index e9b3992204bb..5dc1bde45ac5 100644
--- a/csrc/cpu/pos_encoding.cpp
+++ b/csrc/cpu/pos_encoding.cpp
@@ -19,7 +19,6 @@ void rotary_embedding_impl(
     const int num_tokens) {
   using scalar_vec_t = vec_op::vec_t<scalar_t>;
   constexpr int VEC_ELEM_NUM = scalar_vec_t::get_elem_num();
-  constexpr int ELEM_SIZE = sizeof(scalar_t);
   const int embed_dim = rot_dim / 2;
   TORCH_CHECK(embed_dim % VEC_ELEM_NUM == 0);
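
Note (not part of the applied patch): a minimal, hypothetical C++ sketch of the warning class the first hunk appears to quiet. The element type, function name, and build flags below are illustrative assumptions, not taken from the vLLM sources.

// Illustrative only: with `int num_layers = key_caches.size();`, the
// comparison against value_caches.size() mixes a signed int with the
// unsigned std::size_t returned by std::vector::size(), which g++
// reports under -Wsign-compare (enabled by -Wall in C++). Using an
// unsigned type on both sides avoids the diagnostic.
#include <cstddef>
#include <cstdio>
#include <vector>

static std::size_t count_layers(const std::vector<int>& key_caches,
                                const std::vector<int>& value_caches) {
  std::size_t num_layers = key_caches.size();   // unsigned, matches size()
  if (num_layers != value_caches.size()) {      // unsigned vs unsigned: clean
    return 0;
  }
  return num_layers;
}

int main() {
  std::vector<int> keys(4), values(4);
  std::printf("layers: %zu\n", count_layers(keys, values));
  return 0;
}

The second hunk is simpler: an unused local constant such as ELEM_SIZE is the sort of thing g++ flags as an unused variable at higher warning levels, so deleting it is a straightforward cleanup.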