cifkao committed on
Commit
228bed3
1 Parent(s): 7214b0b

Add unimplemented KL divergence option

Browse files
Files changed (1) hide show
  1. app.py +5 -1
app.py CHANGED
@@ -45,7 +45,7 @@ st.header("Context length probing")
45
 
46
  with st.form("form"):
47
  model_name = st.selectbox("Model", ["distilgpt2", "gpt2", "EleutherAI/gpt-neo-125m"])
48
- metric_name = st.selectbox("Metric", ["Cross entropy"])
49
 
50
  window_len = st.select_slider("Window size", options=[8, 16, 32, 64, 128, 256, 512, 1024], value=512)
51
  text = st.text_area(
@@ -55,6 +55,10 @@ with st.form("form"):
55
 
56
  st.form_submit_button("Submit")
57
 
 
 
 
 
58
  tokenizer = st.cache_resource(AutoTokenizer.from_pretrained, show_spinner=False)(model_name)
59
  model = st.cache_resource(AutoModelForCausalLM.from_pretrained, show_spinner=False)(model_name)
60
 
 
45
 
46
  with st.form("form"):
47
  model_name = st.selectbox("Model", ["distilgpt2", "gpt2", "EleutherAI/gpt-neo-125m"])
48
+ metric_name = st.selectbox("Metric", ["KL divergence", "Cross entropy"], index=1)
49
 
50
  window_len = st.select_slider("Window size", options=[8, 16, 32, 64, 128, 256, 512, 1024], value=512)
51
  text = st.text_area(
 
55
 
56
  st.form_submit_button("Submit")
57
 
58
+ if metric_name == "KL divergence":
59
+ st.error("KL divergence is not supported yet. Stay tuned!", icon="😭")
60
+ st.stop()
61
+
62
  tokenizer = st.cache_resource(AutoTokenizer.from_pretrained, show_spinner=False)(model_name)
63
  model = st.cache_resource(AutoModelForCausalLM.from_pretrained, show_spinner=False)(model_name)
64