
Commit

patrickvonplaten authored and sgugger committed Sep 29, 2021
1 parent dc193c9 commit 11144a3
Showing 2 changed files with 16 additions and 0 deletions.
8 changes: 8 additions & 0 deletions src/transformers/models/hubert/modeling_hubert.py
@@ -964,6 +964,14 @@ def __init__(self, config):

        self.hubert = HubertModel(config)
        self.dropout = nn.Dropout(config.final_dropout)

        if config.vocab_size is None:
            raise ValueError(
                f"You are trying to instantiate {self.__class__} with a configuration that "
                "does not define the vocabulary size of the language model head. Please "
                "instantiate the model as follows: `HubertForCTC.from_pretrained(..., vocab_size=vocab_size)` "
                "or define `vocab_size` in your model's configuration."
            )
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size)

        self.init_weights()
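The new guard in `HubertForCTC.__init__` fails fast when the configuration carries no vocabulary size, instead of letting `nn.Linear` fail later with a less helpful error. A minimal sketch of the behavior it adds (the config values below are illustrative, not part of the commit):

from transformers import HubertConfig, HubertForCTC

# Illustrative: a config whose vocab_size is explicitly None now raises at
# construction time with a message pointing to `from_pretrained(..., vocab_size=...)`.
config = HubertConfig(vocab_size=None)
try:
    HubertForCTC(config)
except ValueError as err:
    print(err)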
8 changes: 8 additions & 0 deletions src/transformers/models/wav2vec2/modeling_wav2vec2.py
@@ -1416,6 +1416,14 @@ def __init__(self, config):

        self.wav2vec2 = Wav2Vec2Model(config)
        self.dropout = nn.Dropout(config.final_dropout)

        if config.vocab_size is None:
            raise ValueError(
                f"You are trying to instantiate {self.__class__} with a configuration that "
                "does not define the vocabulary size of the language model head. Please "
                "instantiate the model as follows: `Wav2Vec2ForCTC.from_pretrained(..., vocab_size=vocab_size)` "
                "or define `vocab_size` in your model's configuration."
            )
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size)

        self.init_weights()
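For `Wav2Vec2ForCTC` the check is identical; the error message recommends supplying the vocabulary size at load time. A hedged usage sketch, where the checkpoint name and the value 32 are assumptions (use the size of your tokenizer's vocabulary):

from transformers import Wav2Vec2ForCTC

# Assumed checkpoint and value: passing vocab_size forwards it into the config,
# so the CTC head (lm_head) is created with the right output dimension.
model = Wav2Vec2ForCTC.from_pretrained("facebook/wav2vec2-base", vocab_size=32)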
