clean up logits extraction logic
thomwolf committed Nov 6, 2018
1 parent 4e05244 commit 304dd13
Showing 1 changed file with 13 additions and 16 deletions.
29 changes: 13 additions & 16 deletions run_squad.py
@@ -908,29 +908,26 @@ def main():
         model.eval()
         all_results = []
         logger.info("Start evaluating")
-        for input_ids, input_mask, segment_ids, example_index in tqdm(eval_dataloader, desc="Evaluating"):
+        for input_ids, input_mask, segment_ids, example_indices in tqdm(eval_dataloader, desc="Evaluating"):
             if len(all_results) % 1000 == 0:
                 logger.info("Processing example: %d" % (len(all_results)))

             input_ids = input_ids.to(device)
             input_mask = input_mask.to(device)
             segment_ids = segment_ids.to(device)

-            start_logits, end_logits = model(input_ids, segment_ids, input_mask)
-
-            unique_id = [int(eval_features[e.item()].unique_id) for e in example_index]
-            start_logits = [x.view(-1).detach().cpu().numpy() for x in start_logits]
-            end_logits = [x.view(-1).detach().cpu().numpy() for x in end_logits]
-            for idx, i in enumerate(unique_id):
-                s = [float(x) for x in start_logits[idx]]
-                e = [float(x) for x in end_logits[idx]]
-                all_results.append(
-                    RawResult(
-                        unique_id=i,
-                        start_logits=s,
-                        end_logits=e
-                    )
-                )
+            with torch.no_grad():
+                batch_start_logits, batch_end_logits = model(input_ids, segment_ids, input_mask)
+
+            for i, example_index in enumerate(example_indices):
+                start_logits = batch_start_logits[i].detach().cpu().tolist()
+                end_logits = batch_end_logits[i].detach().cpu().tolist()
+
+                eval_feature = eval_features[example_index.item()]
+                unique_id = int(eval_feature.unique_id)
+                all_results.append(RawResult(unique_id=unique_id,
+                                             start_logits=start_logits,
+                                             end_logits=end_logits))

         output_prediction_file = os.path.join(args.output_dir, "predictions.json")
         output_nbest_file = os.path.join(args.output_dir, "nbest_predictions.json")
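
For context: the change wraps the evaluation forward pass in torch.no_grad(), so PyTorch skips building the autograd graph during inference, and replaces the per-element float() conversion with a single .detach().cpu().tolist() per example. Below is a minimal, self-contained sketch of that pattern; the QA head, tensor shapes, and variable names are hypothetical stand-ins, not run_squad.py's BERT model.

    import torch

    # Hypothetical stand-ins for BERT's encoder output and QA head.
    batch_size, seq_len, hidden = 4, 8, 16
    encoder_out = torch.randn(batch_size, seq_len, hidden)  # per-token representations
    qa_head = torch.nn.Linear(hidden, 2)                    # per-token start/end scores

    all_results = []
    with torch.no_grad():                                   # inference only: no autograd graph
        logits = qa_head(encoder_out)                       # (batch_size, seq_len, 2)
        batch_start_logits, batch_end_logits = logits.split(1, dim=-1)
        batch_start_logits = batch_start_logits.squeeze(-1) # (batch_size, seq_len)
        batch_end_logits = batch_end_logits.squeeze(-1)     # (batch_size, seq_len)

    for i in range(batch_size):                             # one result per example in the batch
        start_logits = batch_start_logits[i].detach().cpu().tolist()  # list of seq_len floats
        end_logits = batch_end_logits[i].detach().cpu().tolist()
        all_results.append((i, start_logits, end_logits))

Note that tolist() on a 1-D float tensor already yields a plain list of Python floats, which is why the explicit float(x) loop in the old code became unnecessary.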
