@@ -179,8 +179,7 @@ def check_available_online(
179179                                          min_transformers_version = "4.54" ),
180180    "Fairseq2LlamaForCausalLM" : _HfExamplesInfo ("mgleize/fairseq2-dummy-Llama-3.2-1B" ),  # noqa: E501 
181181    "FalconForCausalLM" : _HfExamplesInfo ("tiiuae/falcon-7b" ),
182-     "FalconH1ForCausalLM" :_HfExamplesInfo ("tiiuae/Falcon-H1-0.5B-Base" ,
183-                                           min_transformers_version = "4.53" ),
182+     "FalconH1ForCausalLM" :_HfExamplesInfo ("tiiuae/Falcon-H1-0.5B-Base" ),
184183    "GemmaForCausalLM" : _HfExamplesInfo ("google/gemma-1.1-2b-it" ),
185184    "Gemma2ForCausalLM" : _HfExamplesInfo ("google/gemma-2-9b" ),
186185    "Gemma3ForCausalLM" : _HfExamplesInfo ("google/gemma-3-1b-it" ),
@@ -223,7 +222,10 @@ def check_available_online(
223222                                            trust_remote_code = True ),
224223    "JAISLMHeadModel" : _HfExamplesInfo ("inceptionai/jais-13b-chat" ),
225224    "JambaForCausalLM" : _HfExamplesInfo ("ai21labs/AI21-Jamba-1.5-Mini" ,
226-                                         extras = {"tiny" : "ai21labs/Jamba-tiny-dev" }),  # noqa: E501 
225+                                         extras = {
226+                                             "tiny" : "ai21labs/Jamba-tiny-dev" ,
227+                                             "random" : "ai21labs/Jamba-tiny-random" ,  # noqa: E501 
228+                                         }),
227229    "LlamaForCausalLM" : _HfExamplesInfo ("meta-llama/Llama-3.2-1B-Instruct" ,
228230                                        extras = {"guard" : "meta-llama/Llama-Guard-3-1B" ,  # noqa: E501 
229231                                                "hermes" : "NousResearch/Hermes-3-Llama-3.1-8B" , # noqa: E501 
@@ -239,8 +241,7 @@ def check_available_online(
239241                                         trust_remote_code = True ),
240242    "MiniCPM3ForCausalLM" : _HfExamplesInfo ("openbmb/MiniCPM3-4B" ,
241243                                         trust_remote_code = True ),
242-     "MiniMaxForCausalLM" : _HfExamplesInfo ("MiniMaxAI/MiniMax-Text-01-hf" ,
243-                                           min_transformers_version = "4.53" ),
244+     "MiniMaxForCausalLM" : _HfExamplesInfo ("MiniMaxAI/MiniMax-Text-01-hf" ),
244245    "MiniMaxText01ForCausalLM" : _HfExamplesInfo ("MiniMaxAI/MiniMax-Text-01" ,
245246                                                trust_remote_code = True ,
246247                                                revision = "a59aa9cbc53b9fb8742ca4e9e1531b9802b6fdc3" ),  # noqa: E501 
@@ -272,6 +273,8 @@ def check_available_online(
272273    "PhiMoEForCausalLM" : _HfExamplesInfo ("microsoft/Phi-3.5-MoE-instruct" ,
273274                                         trust_remote_code = True ),
274275    "Plamo2ForCausalLM" : _HfExamplesInfo ("pfnet/plamo-2-1b" ,
276+                                          max_transformers_version = "4.53" ,
277+                                          transformers_version_reason = "vLLM impl inherits PreTrainedModel and clashes with get_input_embeddings" ,  # noqa: E501 
275278                                        trust_remote_code = True ),
276279    "QWenLMHeadModel" : _HfExamplesInfo ("Qwen/Qwen-7B-Chat" ,
277280                                       trust_remote_code = True ),
@@ -299,8 +302,7 @@ def check_available_online(
299302    "Zamba2ForCausalLM" : _HfExamplesInfo ("Zyphra/Zamba2-7B-instruct" ),
300303    "MiMoForCausalLM" : _HfExamplesInfo ("XiaomiMiMo/MiMo-7B-RL" ,
301304                                        trust_remote_code = True ),
302-     "Dots1ForCausalLM" : _HfExamplesInfo ("rednote-hilab/dots.llm1.inst" ,
303-                                         min_transformers_version = "4.53" ),
305+     "Dots1ForCausalLM" : _HfExamplesInfo ("rednote-hilab/dots.llm1.inst" ),
304306    # [Encoder-decoder] 
305307    "BartModel" : _HfExamplesInfo ("facebook/bart-base" ),
306308    "BartForConditionalGeneration" : _HfExamplesInfo ("facebook/bart-large-cnn" ),
@@ -326,8 +328,12 @@ def check_available_online(
326328    "NomicBertModel" : _HfExamplesInfo ("nomic-ai/nomic-embed-text-v2-moe" ,
327329                                               trust_remote_code = True , v0_only = True ),  # noqa: E501 
328330    "Qwen2Model" : _HfExamplesInfo ("ssmits/Qwen2-7B-Instruct-embed-base" ),
329-     "Qwen2ForRewardModel" : _HfExamplesInfo ("Qwen/Qwen2.5-Math-RM-72B" ),
330-     "Qwen2ForProcessRewardModel" : _HfExamplesInfo ("Qwen/Qwen2.5-Math-PRM-7B" ),
331+     "Qwen2ForRewardModel" : _HfExamplesInfo ("Qwen/Qwen2.5-Math-RM-72B" ,
332+                                            max_transformers_version = "4.53" ,
333+                                            transformers_version_reason = "HF model uses remote code that is not compatible with latest Transformers" ),  # noqa: E501 
334+     "Qwen2ForProcessRewardModel" : _HfExamplesInfo ("Qwen/Qwen2.5-Math-PRM-7B" ,
335+                                                   max_transformers_version = "4.53" ,
336+                                                   transformers_version_reason = "HF model uses remote code that is not compatible with latest Transformers" ),  # noqa: E501 
331337    "RobertaModel" : _HfExamplesInfo ("sentence-transformers/stsb-roberta-base-v2" , v0_only = True ),  # noqa: E501 
332338    "RobertaForMaskedLM" : _HfExamplesInfo ("sentence-transformers/all-roberta-large-v1" , v0_only = True ),  # noqa: E501 
333339    "XLMRobertaModel" : _HfExamplesInfo ("intfloat/multilingual-e5-small" , v0_only = True ),  # noqa: E501 