Grosy committed on
Commit
8eaf3f5
1 Parent(s): 5bec655

Fixed link

Browse files
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -55,7 +55,7 @@ def load_model_and_tokenizer():
55
 
56
  @st.cache(hash_funcs={transformers.models.bert.tokenization_bert_fast.BertTokenizerFast: lambda _: None, transformers.models.bert.modeling_bert.BertModel: lambda _: None})
57
  def load_hu_model_and_tokenizer():
58
- multilingual_checkpoint = 'sentence-transformers/SZTAKI-HLT/hubert-base-cc' #alternative: SZTAKI-HLT/hubert-base-cc
59
  tokenizer = AutoTokenizer.from_pretrained(multilingual_checkpoint)
60
  model = AutoModel.from_pretrained(multilingual_checkpoint)
61
  print(type(tokenizer))
 
55
 
56
  @st.cache(hash_funcs={transformers.models.bert.tokenization_bert_fast.BertTokenizerFast: lambda _: None, transformers.models.bert.modeling_bert.BertModel: lambda _: None})
57
  def load_hu_model_and_tokenizer():
58
+ multilingual_checkpoint = 'SZTAKI-HLT/hubert-base-cc' #alternative: SZTAKI-HLT/hubert-base-cc
59
  tokenizer = AutoTokenizer.from_pretrained(multilingual_checkpoint)
60
  model = AutoModel.from_pretrained(multilingual_checkpoint)
61
  print(type(tokenizer))