[benchmark] Add triton version in the moe tuned config (#24769)

Signed-off-by: Jee Jee Li <pandaleefree@gmail.com>
Author: Jee Jee Li
Date: 2025-09-16 14:10:54 +08:00
parent 238c4c1705
commit 04ad0dc275
3 changed files with 6 additions and 3 deletions


@@ -560,7 +560,7 @@ def save_configs(
     filename = os.path.join(save_dir, filename)
     print(f"Writing best config to {filename}...")
     with open(filename, "w") as f:
-        json.dump(configs, f, indent=4)
+        json.dump({"triton_version": triton.__version__, **configs}, f, indent=4)
         f.write("\n")
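
With this change, the file written by save_configs carries a triton_version entry ahead of the per-batch-size entries. A minimal sketch of the resulting JSON, using a hypothetical configs dict and a guarded import in case Triton is absent:

import json

try:
    import triton
    triton_version = triton.__version__
except ImportError:
    triton_version = "unknown"  # illustration only; the benchmark assumes triton is importable

# Hypothetical tuned entries keyed by batch size (values are illustrative).
configs = {"1": {"BLOCK_SIZE_M": 16, "BLOCK_SIZE_N": 32, "BLOCK_SIZE_K": 64}}

print(json.dumps({"triton_version": triton_version, **configs}, indent=4))
# {
#     "triton_version": "3.4.0",
#     "1": {
#         "BLOCK_SIZE_M": 16,
#         "BLOCK_SIZE_N": 32,
#         "BLOCK_SIZE_K": 64
#     }
# }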


@@ -720,7 +720,10 @@ def get_moe_configs(
             logger.info("Using configuration from %s for MoE layer.",
                         config_file_path)
             # If a configuration has been found, return it
-            return {int(key): val for key, val in json.load(f).items()}
+            tuned_config = json.load(f)
+            # Delete triton_version from tuned_config
+            tuned_config.pop("triton_version", None)
+            return {int(key): val for key, val in tuned_config.items()}

     # If no optimized configuration is available, we will use the default
     # configuration
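
On the read side, the metadata key is dropped before the batch-size keys are cast back to integers, so files written in either the old or the new format load cleanly. A standalone sketch of that round trip (the file name is hypothetical):

import json

# Hypothetical tuned-config file produced by the benchmark script.
with open("E=8,N=14336,device_name=NVIDIA_H100.json") as f:
    tuned_config = json.load(f)

# Harmless on older files without the key, thanks to the None default.
tuned_config.pop("triton_version", None)
moe_configs = {int(key): val for key, val in tuned_config.items()}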


@@ -68,7 +68,7 @@ class TritonPlaceholder(types.ModuleType):
     def __init__(self):
         super().__init__("triton")
-        self.__version__ = "3.3.0"
+        self.__version__ = "3.4.0"
         self.jit = self._dummy_decorator("jit")
         self.autotune = self._dummy_decorator("autotune")
         self.heuristics = self._dummy_decorator("heuristics")
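
The placeholder version is bumped so that, even in environments without Triton installed, the triton.__version__ lookup in save_configs reports the version the project now targets. A hedged sketch of the placeholder pattern (the real _dummy_decorator helper may differ):

import types

class TritonPlaceholder(types.ModuleType):
    """Stand-in module exposing the attributes touched when Triton is missing."""

    def __init__(self):
        super().__init__("triton")
        self.__version__ = "3.4.0"
        self.jit = self._dummy_decorator("jit")

    def _dummy_decorator(self, name):
        # Supports both @triton.jit and @triton.jit(...) forms;
        # returns the decorated function unchanged.
        def decorator(*args, **kwargs):
            if len(args) == 1 and callable(args[0]):
                return args[0]
            return lambda fn: fn
        return decorator

triton = TritonPlaceholder()
assert triton.__version__ == "3.4.0"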