leejunhyeok committed on
Commit
7ed4264
·
verified ·
1 Parent(s): a14cdf2

Update configuration_motif.py

Browse files
Files changed (1) hide show
  1. configuration_motif.py +0 -4
configuration_motif.py CHANGED
@@ -136,10 +136,6 @@ class MotifConfig(PretrainedConfig):
136
  attention_dropout=0.0,
137
  **kwargs,
138
  ):
139
- """
140
- Arguments:
141
- multi_token_heads: If not None, use multi-token heads as in the paper https://arxiv.org/pdf/2404.19737
142
- """
143
 
144
  self.vocab_size = vocab_size
145
  self.max_position_embeddings = max_position_embeddings
 
136
  attention_dropout=0.0,
137
  **kwargs,
138
  ):
 
 
 
 
139
 
140
  self.vocab_size = vocab_size
141
  self.max_position_embeddings = max_position_embeddings