fix: huggingface#14486 do not use BertPooler in DPR
PaulLerner committed Jan 7, 2022
1 parent f18c6fa · commit 95eaf44
Showing 1 changed file with 2 additions and 2 deletions.
src/transformers/models/dpr/modeling_dpr.py (2 additions, 2 deletions)
@@ -175,7 +175,7 @@ class DPREncoder(DPRPreTrainedModel):
 
     def __init__(self, config: DPRConfig):
         super().__init__(config)
-        self.bert_model = BertModel(config)
+        self.bert_model = BertModel(config, add_pooling_layer=False)
         assert self.bert_model.config.hidden_size > 0, "Encoder hidden_size can't be zero"
         self.projection_dim = config.projection_dim
         if self.projection_dim > 0:
@@ -202,7 +202,7 @@ def forward(
             output_hidden_states=output_hidden_states,
             return_dict=return_dict,
         )
-        sequence_output, pooled_output = outputs[:2]
+        sequence_output = outputs[0]
         pooled_output = sequence_output[:, 0, :]
         if self.projection_dim > 0:
            pooled_output = self.encode_proj(pooled_output)
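For context: as the second hunk shows, the old code computed the BertPooler output only to immediately overwrite it with the [CLS] token's hidden state, so the pooler was dead weight in DPR; passing add_pooling_layer=False stops BertModel from building it at all. Below is a minimal sketch, not part of the commit, illustrating the resulting behavior (the config sizes and input ids are arbitrary, chosen only to keep the example small):

    import torch
    from transformers import BertConfig, BertModel

    # A tiny random-weight BERT purely for illustration; sizes are arbitrary.
    config = BertConfig(
        hidden_size=32, num_hidden_layers=2,
        num_attention_heads=2, intermediate_size=64,
    )
    model = BertModel(config, add_pooling_layer=False)

    assert model.pooler is None  # BertPooler is never instantiated

    input_ids = torch.tensor([[101, 7592, 102]])  # [CLS] hello [SEP]
    outputs = model(input_ids)

    sequence_output = outputs[0]              # (batch, seq_len, hidden_size)
    pooled_output = sequence_output[:, 0, :]  # [CLS] hidden state, as DPR pools
    print(pooled_output.shape)                # torch.Size([1, 32])

With the pooler disabled, the model's outputs no longer carry a pooler output, so the old unpacking sequence_output, pooled_output = outputs[:2] would no longer grab the right tensors; hence the switch to sequence_output = outputs[0] in the diff above.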
