lataon committed on
Commit
cd08186
·
1 Parent(s): ff5a4d6

update: token

Browse files
Files changed (2) hide show
  1. app.py +14 -4
  2. init.py +2 -1
app.py CHANGED
@@ -8,6 +8,12 @@ from init import is_model_on_hub, upload_file, load_all_info_from_dataset_hub
8
  from utils_display import PhonemeEvalColumn, fields, make_clickable_model, styled_error, styled_message
9
  import numpy as np
10
  from datetime import datetime, timezone
 
 
 
 
 
 
11
 
12
  LAST_UPDATED = "Oct 2nd 2025"
13
 
@@ -104,7 +110,10 @@ def load_results(results_dir: str) -> pd.DataFrame:
104
  # Load initial data
105
  try:
106
  eval_queue_repo, requested_models, csv_results = load_all_info_from_dataset_hub()
107
- if csv_results and csv_results.exists():
 
 
 
108
  original_df = pd.read_csv(csv_results)
109
  # Format the columns
110
  def formatter(x):
@@ -182,7 +191,7 @@ def request_model(model_text, chb_phoneme_asr, chb_kids_phoneme_md):
182
 
183
  fn_datasets = '@ '.join(dataset_selection)
184
  filename = model_text.replace("/","@") + "@@" + fn_datasets
185
- if filename in requested_models:
186
  return styled_error(f"A request for this model '{model_text}' and dataset(s) was already made.")
187
  try:
188
  filename_ext = filename + ".txt"
@@ -195,7 +204,8 @@ def request_model(model_text, chb_phoneme_asr, chb_kids_phoneme_md):
195
  upload_file(filename, out_filepath)
196
 
197
  # Include file in the list of uploaded files
198
- requested_models.append(filename)
 
199
 
200
  # Remove the local file
201
  out_filepath.unlink()
@@ -286,4 +296,4 @@ with gr.Blocks(css=LEADERBOARD_CSS) as demo:
286
  show_copy_button=True,
287
  )
288
 
289
- demo.launch(ssr_mode=False)
 
8
  from utils_display import PhonemeEvalColumn, fields, make_clickable_model, styled_error, styled_message
9
  import numpy as np
10
  from datetime import datetime, timezone
11
+ from dotenv import load_dotenv
12
+
13
+ # Load environment variables from .env file
14
+ load_dotenv()
15
+
16
+ HF_TOKEN = os.environ.get("HF_TOKEN", None)
17
 
18
  LAST_UPDATED = "Oct 2nd 2025"
19
 
 
110
  # Load initial data
111
  try:
112
  eval_queue_repo, requested_models, csv_results = load_all_info_from_dataset_hub()
113
+ if eval_queue_repo is None or requested_models is None or csv_results is None:
114
+ # No token provided, fallback to local results
115
+ original_df = load_results(EVAL_RESULTS_DIR)
116
+ elif csv_results and csv_results.exists():
117
  original_df = pd.read_csv(csv_results)
118
  # Format the columns
119
  def formatter(x):
 
191
 
192
  fn_datasets = '@ '.join(dataset_selection)
193
  filename = model_text.replace("/","@") + "@@" + fn_datasets
194
+ if requested_models and filename in requested_models:
195
  return styled_error(f"A request for this model '{model_text}' and dataset(s) was already made.")
196
  try:
197
  filename_ext = filename + ".txt"
 
204
  upload_file(filename, out_filepath)
205
 
206
  # Include file in the list of uploaded files
207
+ if requested_models is not None:
208
+ requested_models.append(filename)
209
 
210
  # Remove the local file
211
  out_filepath.unlink()
 
296
  show_copy_button=True,
297
  )
298
 
299
+ demo.launch()
init.py CHANGED
@@ -39,7 +39,8 @@ def load_all_info_from_dataset_hub():
39
  if csv_results is None:
40
  passed = False
41
  if not passed:
42
- raise ValueError("No Hugging Face token provided. Skipping evaluation requests and results.")
 
43
 
44
  return eval_queue_repo, requested_models, csv_results
45
 
 
39
  if csv_results is None:
40
  passed = False
41
  if not passed:
42
+ print("No Hugging Face token provided. Skipping evaluation requests and results.")
43
+ return None, None, None
44
 
45
  return eval_queue_repo, requested_models, csv_results
46