Skip to content

Commit 9ba85d1

Browse files
committed
Assume that no linear layer in NomicBertModel has a bias
1 parent c9989a7 commit 9ba85d1

File tree

1 file changed

+7
-6
lines changed

1 file changed

+7
-6
lines changed

vllm/model_executor/models/bert.py

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -645,6 +645,11 @@ def _build_model(self,
645645
assert config.__class__.__name__ == "NomicBertConfig"
646646
assert config.activation_function == "swiglu"
647647

648+
# Assume that no linear layer in NomicBertModel has a bias
649+
assert not config.mlp_fc1_bias
650+
assert not config.mlp_fc2_bias
651+
assert not config.qkv_proj_bias
652+
648653
config.layer_norm_eps = config.layer_norm_epsilon
649654
config.position_embedding_type = "rotary"
650655
config.intermediate_size = config.n_inner
@@ -664,14 +669,10 @@ def _build_model(self,
664669
}
665670
}
666671

667-
assert config.mlp_fc1_bias == config.mlp_fc2_bias
668-
bias = config.qkv_proj_bias
669-
gate_up_proj_bias = config.mlp_fc1_bias
670-
671672
return BertModel(vllm_config=vllm_config,
672673
prefix=prefix,
673-
bias=bias,
674-
gate_up_proj_bias=gate_up_proj_bias,
674+
bias=False,
675+
gate_up_proj_bias=False,
675676
rotary_kwargs=rotary_kwargs,
676677
embedding_class=BertEmbedding)
677678

0 commit comments

Comments
 (0)