Implementation of TableFormer, Robust Transformer Modeling for Table-Text Encoding, in PyTorch. The claim of this paper is that through attentional biases, they can make transformers more robust to perturbations to the table in question. They show improved results compared to TAPAS.
@article{Yang2022TableFormerRT,
  title         = {{TableFormer}: Robust {Transformer} Modeling for Table-Text Encoding},
  author        = {Yang, Jingfeng and Gupta, Aditya and Upadhyay, Shyam and He, Luheng and Goel, Rahul and Paul, Shachi},
  journal       = {ArXiv},
  volume        = {abs/2203.00274},
  year          = {2022},
  eprint        = {2203.00274},
  archiveprefix = {arXiv},
  primaryclass  = {cs.CL},
}