51 changes: 33 additions & 18 deletions vllm/transformers_utils/config.py
@@ -32,11 +32,10 @@
 from vllm.transformers_utils.configs import (ChatGLMConfig, DeepseekVLV2Config,
                                              EAGLEConfig, JAISConfig,
                                              KimiVLConfig, MedusaConfig,
-                                             MllamaConfig, MLPSpeculatorConfig,
+                                             MLPSpeculatorConfig,
                                              Nemotron_Nano_VL_Config,
-                                             NemotronConfig, NVLM_D_Config,
-                                             OvisConfig, RWConfig,
-                                             SpeculatorsConfig,
+                                             NemotronConfig, OvisConfig,
+                                             RWConfig, SpeculatorsConfig,
                                              Step3TextConfig, Step3VLConfig,
                                              UltravoxConfig)
 # yapf: enable
@@ -68,10 +67,6 @@ def _get_hf_token() -> Optional[str]:
     return None
 
 
-_CONFIG_REGISTRY_OVERRIDE_HF: dict[str, type[PretrainedConfig]] = {
-    "mllama": MllamaConfig
-}
-
 _CONFIG_REGISTRY: dict[str, type[PretrainedConfig]] = {
     "chatglm": ChatGLMConfig,
     "deepseek_vl_v2": DeepseekVLV2Config,
@@ -85,18 +80,30 @@ def _get_hf_token() -> Optional[str]:
     "eagle": EAGLEConfig,
     "speculators": SpeculatorsConfig,
     "nemotron": NemotronConfig,
-    "NVLM_D": NVLM_D_Config,
     "ovis": OvisConfig,
     "ultravox": UltravoxConfig,
     "step3_vl": Step3VLConfig,
     "step3_text": Step3TextConfig,
-    **_CONFIG_REGISTRY_OVERRIDE_HF
 }
 
 _CONFIG_ATTRS_MAPPING: dict[str, str] = {
     "llm_config": "text_config",
 }
 
+_AUTO_CONFIG_KWARGS_OVERRIDES: dict[str, dict[str, Any]] = {
+    "internvl_chat": {
+        "has_no_defaults_at_init": True
+    },
+    # transformers regards mllama as is_encoder_decoder=False
+    # vllm needs is_encoder_decoder=True to enable cross-attention
+    "mllama": {
+        "is_encoder_decoder": True
+    },
+    "NVLM_D": {
+        "has_no_defaults_at_init": True
+    },
+}
+
 
 class ConfigFormat(str, enum.Enum):
     AUTO = "auto"
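
As a side note (not part of the diff), the "mllama" entry exists because transformers defaults `is_encoder_decoder` to `False`, while vLLM keys its cross-attention path off that flag. A minimal sketch of the effect, using a bare `PretrainedConfig` as a stand-in for the real Mllama config:

```python
# Illustrative sketch only; assumes transformers is installed.
from transformers import PretrainedConfig

cfg = PretrainedConfig()
print(getattr(cfg, "is_encoder_decoder", False))   # False by default

cfg.update({"is_encoder_decoder": True})           # what the override injects
print(getattr(cfg, "is_encoder_decoder", False))   # True
```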
@@ -273,11 +280,12 @@ def thinker_uses_mrope(config: PretrainedConfig) -> bool:
 
 def is_encoder_decoder(config: PretrainedConfig) -> bool:
     """Detect if the model with this config is used as an encoder/decoder."""
-    text_config = getattr(config, "text_config", None)
-    if text_config is not None:
-        return is_encoder_decoder(text_config)
 
-    return getattr(config, "is_encoder_decoder", False)
+    def _is_encoder_decoder(config: PretrainedConfig) -> bool:
+        return getattr(config, "is_encoder_decoder", False)
+
+    return (_is_encoder_decoder(config)
+            or _is_encoder_decoder(config.get_text_config()))
 
 
 def is_interleaved(config: PretrainedConfig) -> bool:
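
For reference, a quick self-contained check of the rewritten helper (assuming a reasonably recent transformers, where `get_text_config()` returns the nested text config if present and the config itself otherwise):

```python
# Copy of the new helper from the hunk above plus a tiny usage example.
from transformers import PretrainedConfig


def is_encoder_decoder(config: PretrainedConfig) -> bool:

    def _is_encoder_decoder(config: PretrainedConfig) -> bool:
        return getattr(config, "is_encoder_decoder", False)

    return (_is_encoder_decoder(config)
            or _is_encoder_decoder(config.get_text_config()))


print(is_encoder_decoder(PretrainedConfig()))                         # False
print(is_encoder_decoder(PretrainedConfig(is_encoder_decoder=True)))  # True
```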
@@ -291,13 +299,21 @@ def is_interleaved(config: PretrainedConfig) -> bool:
     return False
 
 
+def _maybe_update_auto_config_kwargs(kwargs: dict[str, Any], model_type: str):
+    """
+    Update kwargs for AutoConfig initialization based on model_type
+    """
+    if model_type in _AUTO_CONFIG_KWARGS_OVERRIDES:
+        kwargs.update(_AUTO_CONFIG_KWARGS_OVERRIDES[model_type])
+    return kwargs
+
+
 def _maybe_remap_hf_config_attrs(config: PretrainedConfig) -> PretrainedConfig:
     """Remap config attributes to match the expected names."""
     for old_attr, new_attr in _CONFIG_ATTRS_MAPPING.items():
         if hasattr(config, old_attr):
             if not hasattr(config, new_attr):
                 config.update({new_attr: getattr(config, old_attr)})
-            delattr(config, old_attr)
             logger.debug("Remapped config attribute '%s' to '%s'", old_attr,
                          new_attr)
     return config
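
A hypothetical walk-through of the two helpers above (logging dropped, names copied from the diff; the nested config is a plain `PretrainedConfig` stand-in). With the `delattr` gone, the remap copies `llm_config` into `text_config` and leaves the original attribute in place:

```python
from typing import Any

from transformers import PretrainedConfig

_CONFIG_ATTRS_MAPPING: dict[str, str] = {"llm_config": "text_config"}
_AUTO_CONFIG_KWARGS_OVERRIDES: dict[str, dict[str, Any]] = {
    "mllama": {"is_encoder_decoder": True},
}


def _maybe_update_auto_config_kwargs(kwargs: dict[str, Any], model_type: str):
    if model_type in _AUTO_CONFIG_KWARGS_OVERRIDES:
        kwargs.update(_AUTO_CONFIG_KWARGS_OVERRIDES[model_type])
    return kwargs


def _maybe_remap_hf_config_attrs(config: PretrainedConfig) -> PretrainedConfig:
    for old_attr, new_attr in _CONFIG_ATTRS_MAPPING.items():
        if hasattr(config, old_attr):
            if not hasattr(config, new_attr):
                config.update({new_attr: getattr(config, old_attr)})
    return config


print(_maybe_update_auto_config_kwargs({"revision": "main"}, "mllama"))
# {'revision': 'main', 'is_encoder_decoder': True}

cfg = _maybe_remap_hf_config_attrs(
    PretrainedConfig(llm_config=PretrainedConfig()))
print(hasattr(cfg, "text_config"), hasattr(cfg, "llm_config"))  # True True
```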
@@ -408,15 +424,14 @@ def get_config(
             )
         else:
             try:
+                kwargs = _maybe_update_auto_config_kwargs(
+                    kwargs, model_type=model_type)
                 config = AutoConfig.from_pretrained(
                     model,
                     trust_remote_code=trust_remote_code,
                     revision=revision,
                     code_revision=code_revision,
                     token=_get_hf_token(),
-                    # some old custom model's config needs
-                    # `has_no_defaults_at_init=True` to work.
-                    has_no_defaults_at_init=trust_remote_code,
                     **kwargs,
                 )
             except ValueError as e:
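
Condensed sketch (not the actual vLLM code path) of what the `get_config` hunk changes: the per-model overrides are merged into `kwargs` before `AutoConfig.from_pretrained`, replacing the old blanket `has_no_defaults_at_init=trust_remote_code` argument, so only the model types listed in `_AUTO_CONFIG_KWARGS_OVERRIDES` receive such flags. The helper and table are assumed to be the ones defined in the previous sketch; the checkpoint id is a placeholder and loading it needs Hub access:

```python
from typing import Any

from transformers import AutoConfig


def load_auto_config(model: str, model_type: str,
                     trust_remote_code: bool = False,
                     **kwargs: Any):
    # Mirrors the new call order: apply per-model overrides, then AutoConfig.
    kwargs = _maybe_update_auto_config_kwargs(kwargs, model_type=model_type)
    return AutoConfig.from_pretrained(model,
                                      trust_remote_code=trust_remote_code,
                                      **kwargs)


# e.g. load_auto_config("<some mllama checkpoint>", model_type="mllama")
```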
6 changes: 1 addition & 5 deletions vllm/transformers_utils/configs/__init__.py
@@ -17,13 +17,11 @@
 from vllm.transformers_utils.configs.jais import JAISConfig
 from vllm.transformers_utils.configs.kimi_vl import KimiVLConfig
 from vllm.transformers_utils.configs.medusa import MedusaConfig
-from vllm.transformers_utils.configs.mllama import MllamaConfig
 from vllm.transformers_utils.configs.mlp_speculator import MLPSpeculatorConfig
 from vllm.transformers_utils.configs.moonvit import MoonViTConfig
 from vllm.transformers_utils.configs.nemotron import NemotronConfig
 from vllm.transformers_utils.configs.nemotron_h import NemotronHConfig
 from vllm.transformers_utils.configs.nemotron_vl import Nemotron_Nano_VL_Config
-from vllm.transformers_utils.configs.nvlm_d import NVLM_D_Config
 from vllm.transformers_utils.configs.ovis import OvisConfig
 from vllm.transformers_utils.configs.speculators.base import SpeculatorsConfig
 from vllm.transformers_utils.configs.step3_vl import (Step3TextConfig,
@@ -34,18 +32,16 @@
 __all__ = [
     "ChatGLMConfig",
     "DeepseekVLV2Config",
+    "EAGLEConfig",
     "RWConfig",
     "JAISConfig",
     "MedusaConfig",
-    "EAGLEConfig",
-    "MllamaConfig",
     "MLPSpeculatorConfig",
     "MoonViTConfig",
     "KimiVLConfig",
     "NemotronConfig",
     "NemotronHConfig",
     "Nemotron_Nano_VL_Config",
-    "NVLM_D_Config",
     "OvisConfig",
     "SpeculatorsConfig",
     "UltravoxConfig",
31 changes: 0 additions & 31 deletions vllm/transformers_utils/configs/mllama.py

This file was deleted.

31 changes: 0 additions & 31 deletions vllm/transformers_utils/configs/nvlm_d.py

This file was deleted.
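
Presumably the two deleted classes are redundant now: recent transformers releases (since roughly 4.45) ship `MllamaConfig` upstream, and NVLM-D falls back to `AutoConfig` with `trust_remote_code` plus the `_AUTO_CONFIG_KWARGS_OVERRIDES` entry added above. A quick sanity check of the upstream class (my assumption, not stated in the PR):

```python
from transformers import MllamaConfig  # upstream class, no vLLM copy needed

print(MllamaConfig().model_type)  # "mllama"
```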