
Commit

huggingface#23388 Issue: Update RoBERTa configuration (huggingface#23863)
vijethmoudgalya authored and sheonhan committed Jun 1, 2023
1 parent ca330a1 commit c399d7a
Showing 2 changed files with 4 additions and 4 deletions.
4 changes: 2 additions & 2 deletions src/transformers/models/roberta/configuration_roberta.py
@@ -46,7 +46,7 @@ class RobertaConfig(PretrainedConfig):
     Args:
-        vocab_size (`int`, *optional*, defaults to 30522):
+        vocab_size (`int`, *optional*, defaults to 50265):
             Vocabulary size of the RoBERTa model. Defines the number of different tokens that can be represented by the
             `inputs_ids` passed when calling [`RobertaModel`] or [`TFRobertaModel`].
         hidden_size (`int`, *optional*, defaults to 768):
@@ -105,7 +105,7 @@ class RobertaConfig(PretrainedConfig):

     def __init__(
         self,
-        vocab_size=30522,
+        vocab_size=50265,
         hidden_size=768,
         num_hidden_layers=12,
         num_attention_heads=12,
4 changes: 2 additions & 2 deletions src/transformers/models/roberta_prelayernorm/configuration_roberta_prelayernorm.py
@@ -45,7 +45,7 @@ class RobertaPreLayerNormConfig(PretrainedConfig):
     Args:
-        vocab_size (`int`, *optional*, defaults to 30522):
+        vocab_size (`int`, *optional*, defaults to 50265):
             Vocabulary size of the RoBERTa-PreLayerNorm model. Defines the number of different tokens that can be
             represented by the `inputs_ids` passed when calling [`RobertaPreLayerNormModel`] or
             [`TFRobertaPreLayerNormModel`].
@@ -106,7 +106,7 @@ class RobertaPreLayerNormConfig(PretrainedConfig):

     def __init__(
         self,
-        vocab_size=30522,
+        vocab_size=50265,
         hidden_size=768,
         num_hidden_layers=12,
         num_attention_heads=12,
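For context, the effect of both changed defaults can be checked directly; the snippet below is a minimal sketch and assumes a transformers release that already contains this commit. The new value 50265 is the vocabulary size of the pretrained roberta-base BPE tokenizer, whereas the previous default of 30522 was BERT's WordPiece vocabulary size.

# Minimal sketch: verify the updated defaults (assumes a transformers version with this commit).
from transformers import RobertaConfig, RobertaPreLayerNormConfig

# A bare config now defaults to RoBERTa's BPE vocabulary size rather than BERT's 30522.
config = RobertaConfig()
print(config.vocab_size)  # 50265

prelayernorm_config = RobertaPreLayerNormConfig()
print(prelayernorm_config.vocab_size)  # 50265

# Optional sanity check against the pretrained tokenizer (downloads roberta-base):
# from transformers import RobertaTokenizer
# assert RobertaTokenizer.from_pretrained("roberta-base").vocab_size == config.vocab_size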
