[Bugfix] Re-enable support for ChatGLMForConditionalGeneration (#16187)

Signed-off-by: DarkLight1337 <tlleungac@connect.ust.hk>
This commit is contained in:
Cyrus Leung 2025-04-07 23:15:58 +08:00 committed by GitHub
parent 55dcce91df
commit 027b204ff1
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
3 changed files with 5 additions and 2 deletions

View File

@@ -233,9 +233,9 @@ See [this page](#generative-models) for more information on how to use generativ
* `facebook/bart-base`, `facebook/bart-large-cnn`, etc.
*
*
- * `ChatGLMModel`
- * `ChatGLMModel`, `ChatGLMForConditionalGeneration`
* ChatGLM
* `THUDM/chatglm2-6b`, `THUDM/chatglm3-6b`, etc.
* `THUDM/chatglm2-6b`, `THUDM/chatglm3-6b`, `ShieldLM-6B-chatglm3`, etc.
* ✅︎
* ✅︎
- * `CohereForCausalLM`, `Cohere2ForCausalLM`

View File

@@ -124,6 +124,8 @@ _TEXT_GENERATION_EXAMPLE_MODELS = {
"BloomForCausalLM": _HfExamplesInfo("bigscience/bloomz-1b1"),
"ChatGLMModel": _HfExamplesInfo("THUDM/chatglm3-6b",
trust_remote_code=True),
"ChatGLMForConditionalGeneration": _HfExamplesInfo("thu-coai/ShieldLM-6B-chatglm3", # noqa: E501
trust_remote_code=True),
"CohereForCausalLM": _HfExamplesInfo("CohereForAI/c4ai-command-r-v01",
trust_remote_code=True),
"Cohere2ForCausalLM": _HfExamplesInfo("CohereForAI/c4ai-command-r7b-12-2024", # noqa: E501

View File

@@ -43,6 +43,7 @@ _TEXT_GENERATION_MODELS = {
"BambaForCausalLM": ("bamba", "BambaForCausalLM"),
"BloomForCausalLM": ("bloom", "BloomForCausalLM"),
"ChatGLMModel": ("chatglm", "ChatGLMForCausalLM"),
"ChatGLMForConditionalGeneration": ("chatglm", "ChatGLMForCausalLM"),
"CohereForCausalLM": ("commandr", "CohereForCausalLM"),
"Cohere2ForCausalLM": ("commandr", "CohereForCausalLM"),
"DbrxForCausalLM": ("dbrx", "DbrxForCausalLM"),