meg-huggingface commited on
Commit
71b7c19
1 Parent(s): 1e11692

Fixing token issue

Browse files
src/leaderboard/read_evals.py CHANGED
@@ -10,6 +10,7 @@ import numpy as np
10
  from src.display.formatting import make_clickable_model
11
  from src.display.utils import AutoEvalColumn, ModelType, Tasks, Precision, WeightType
12
  from src.submission.check_validity import is_model_on_hub
 
13
 
14
 
15
  @dataclass
@@ -56,7 +57,7 @@ class EvalResult:
56
  full_model = "/".join(org_and_model)
57
 
58
  still_on_hub, _, model_config = is_model_on_hub(
59
- full_model, config.get("model_sha", "main"), trust_remote_code=True, test_tokenizer=False
60
  )
61
  architecture = "?"
62
  if model_config is not None:
 
10
  from src.display.formatting import make_clickable_model
11
  from src.display.utils import AutoEvalColumn, ModelType, Tasks, Precision, WeightType
12
  from src.submission.check_validity import is_model_on_hub
13
+ from src.envs import TOKEN
14
 
15
 
16
  @dataclass
 
57
  full_model = "/".join(org_and_model)
58
 
59
  still_on_hub, _, model_config = is_model_on_hub(
60
+ full_model, config.get("model_sha", "main"), token=TOKEN, trust_remote_code=True, test_tokenizer=False
61
  )
62
  architecture = "?"
63
  if model_config is not None:
src/submission/check_validity.py CHANGED
@@ -1,14 +1,13 @@
1
  import json
2
  import os
3
- import re
4
  from collections import defaultdict
5
- from datetime import datetime, timedelta, timezone
6
 
7
  import huggingface_hub
8
  from huggingface_hub import ModelCard
9
  from huggingface_hub.hf_api import ModelInfo
10
  from transformers import AutoConfig
11
- from transformers.models.auto.tokenization_auto import tokenizer_class_from_name, get_tokenizer_config
 
12
 
13
  def check_model_card(repo_id: str) -> tuple[bool, str]:
14
  """Checks if the model card and license exist and have been filled"""
@@ -33,7 +32,7 @@ def check_model_card(repo_id: str) -> tuple[bool, str]:
33
 
34
  def is_model_on_hub(model_name: str, revision: str, token: str = None, trust_remote_code=False, test_tokenizer=False) -> tuple[bool, str]:
35
  try:
36
- config = AutoConfig.from_pretrained(model_name, revision=revision, trust_remote_code=trust_remote_code, token=token)
37
  if test_tokenizer:
38
  try:
39
  print("TOKEN: %s" % token)
@@ -41,7 +40,7 @@ def is_model_on_hub(model_name: str, revision: str, token: str = None, trust_rem
41
  print("revision: %s" % revision)
42
  print("trust remote code: %s" % trust_remote_code)
43
 
44
- tk = AutoTokenizer.from_pretrained(model_name, revision=revision, trust_remote_code=trust_remote_code, token=token)
45
  except ValueError as e:
46
  return (
47
  False,
@@ -49,7 +48,7 @@ def is_model_on_hub(model_name: str, revision: str, token: str = None, trust_rem
49
  None
50
  )
51
  except Exception as e:
52
- return (False, "'s tokenizer cannot be loaded. Is your tokenizer class in a stable transformers release, and correctly configured?", None)
53
  return True, None, config
54
 
55
  except ValueError:
 
1
  import json
2
  import os
 
3
  from collections import defaultdict
 
4
 
5
  import huggingface_hub
6
  from huggingface_hub import ModelCard
7
  from huggingface_hub.hf_api import ModelInfo
8
  from transformers import AutoConfig
9
+ from transformers.models.auto.tokenization_auto import AutoTokenizer
10
+
11
 
12
  def check_model_card(repo_id: str) -> tuple[bool, str]:
13
  """Checks if the model card and license exist and have been filled"""
 
32
 
33
  def is_model_on_hub(model_name: str, revision: str, token: str = None, trust_remote_code=False, test_tokenizer=False) -> tuple[bool, str]:
34
  try:
35
+ config = AutoConfig.from_pretrained(model_name, revision=revision, token=token, trust_remote_code=trust_remote_code)
36
  if test_tokenizer:
37
  try:
38
  print("TOKEN: %s" % token)
 
40
  print("revision: %s" % revision)
41
  print("trust remote code: %s" % trust_remote_code)
42
 
43
+ _ = AutoTokenizer.from_pretrained(model_name, revision=revision, token=token, trust_remote_code=trust_remote_code)
44
  except ValueError as e:
45
  return (
46
  False,
 
48
  None
49
  )
50
  except Exception as e:
51
+ return (False, f"'s tokenizer cannot be loaded. This most commonly happens when your tokenizer class is not in a stable transformers release, or is not correctly configured.\nExact error is: {e}", None)
52
  return True, None, config
53
 
54
  except ValueError:
src/submission/submit.py CHANGED
@@ -50,7 +50,7 @@ def add_new_eval(
50
  return styled_error(f'Base model "{base_model}" {error}')
51
 
52
  if not weight_type == "Adapter":
53
- model_on_hub, error, _ = is_model_on_hub(model_name=model, revision=revision, test_tokenizer=True)
54
  if not model_on_hub:
55
  return styled_error(f'Model "{model}" {error}')
56
 
 
50
  return styled_error(f'Base model "{base_model}" {error}')
51
 
52
  if not weight_type == "Adapter":
53
+ model_on_hub, error, _ = is_model_on_hub(model_name=model, revision=revision, token=TOKEN, test_tokenizer=True)
54
  if not model_on_hub:
55
  return styled_error(f'Model "{model}" {error}')
56