diff --git a/docs/source/bettertransformer/overview.mdx b/docs/source/bettertransformer/overview.mdx
index 228fb812063..c94e866428b 100644
--- a/docs/source/bettertransformer/overview.mdx
+++ b/docs/source/bettertransformer/overview.mdx
@@ -14,7 +14,6 @@ specific language governing permissions and limitations under the License.
 🤗 Optimum provides an integration with `BetterTransformer`, a stable API from PyTorch to benefit from interesting speedups on CPU & GPU through sparsity and fused kernels.
 
-
 ## Quickstart
 
 Since its 1.13 version, PyTorch released the stable version of `BetterTransformer` in its library. You can benefit from interesting speedup on most consumer-type devices, including CPUs, older and newer versions of NIVIDIA GPUs.
 
@@ -23,6 +22,7 @@ You can now use this feature in 🤗 Optimum together with Transformers and use
 ### Supported models
 
 The list of supported model below:
+
 - [AlBERT](https://arxiv.org/abs/1909.11942)
 - [BART](https://arxiv.org/abs/1910.13461)
 - [BERT](https://arxiv.org/abs/1810.04805)
@@ -41,6 +41,7 @@ The list of supported model below:
 - [M2M100](https://arxiv.org/abs/2010.11125)
 - [RoBERTa](https://arxiv.org/abs/1907.11692)
 - [Splinter](https://arxiv.org/abs/2101.00438)
+- [Tapas](https://arxiv.org/abs/2211.06550)
 - [ViLT](https://arxiv.org/abs/2102.03334)
 - [ViT](https://arxiv.org/abs/2010.11929)
 - [ViT-MAE](https://arxiv.org/abs/2111.06377)
diff --git a/optimum/bettertransformer/models/__init__.py b/optimum/bettertransformer/models/__init__.py
index 0106fae977e..05a16c00107 100644
--- a/optimum/bettertransformer/models/__init__.py
+++ b/optimum/bettertransformer/models/__init__.py
@@ -29,6 +29,7 @@
 
 BETTER_TRANFORMER_LAYERS_MAPPING_DICT = {
     # Bert Family
+    "TapasLayer": BertLayerBetterTransformer,
     "BertLayer": BertLayerBetterTransformer,
     "ElectraLayer": BertLayerBetterTransformer,
     "Data2VecTextLayer": BertLayerBetterTransformer,
diff --git a/tests/bettertransformer/test_bettertransformer_encoder.py b/tests/bettertransformer/test_bettertransformer_encoder.py
index f8f5f024474..5ab8e94e6fa 100644
--- a/tests/bettertransformer/test_bettertransformer_encoder.py
+++ b/tests/bettertransformer/test_bettertransformer_encoder.py
@@ -44,6 +44,7 @@
     "hf-internal-testing/tiny-random-MarkupLMModel",
     "hf-internal-testing/tiny-random-BertModel",
     "ybelkada/random-tiny-BertGenerationModel",
+    "hf-internal-testing/tiny-random-TapasModel",
 ]
 
 ALL_ENCODER_DECODER_MODELS_TO_TEST = [
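
For context (not part of the patch), here is a minimal sketch of what this change enables, assuming the `optimum.bettertransformer.BetterTransformer.transform` API and reusing the tiny test checkpoint added to the test list above; any regular Tapas checkpoint should follow the same pattern.

```python
from transformers import AutoModel
from optimum.bettertransformer import BetterTransformer

# Tiny Tapas checkpoint referenced in the test file above (illustrative choice).
model = AutoModel.from_pretrained("hf-internal-testing/tiny-random-TapasModel")

# With this patch, TapasLayer is mapped to BertLayerBetterTransformer, so the
# conversion swaps the Tapas encoder layers for their fused counterparts.
bt_model = BetterTransformer.transform(model, keep_original_model=False)
```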