Illia56 committed on
Commit c88185a
1 Parent(s): da2bacc

Update app.py

Files changed (1)
  1. app.py +7 -7
app.py CHANGED
@@ -11,12 +11,12 @@ a Llama 2 model with 70B parameters fine-tuned for chat instructions.
 # Initialize client
 
 
-# with st.sidebar:
-#     system_promptSide = st.text_input("Optional system prompt:")
-#     temperatureSide = st.slider("Temperature", min_value=0.0, max_value=1.0, value=0.9, step=0.05)
-#     max_new_tokensSide = st.slider("Max new tokens", min_value=0.0, max_value=4096.0, value=4096.0, step=64.0)
-#     ToppSide = st.slider("Top-p (nucleus sampling)", min_value=0.0, max_value=1.0, value=0.6, step=0.05)
-#     RepetitionpenaltySide = st.slider("Repetition penalty", min_value=0.0, max_value=2.0, value=1.2, step=0.05)
+with st.sidebar:
+    system_promptSide = st.text_input("Optional system prompt:")
+    temperatureSide = st.slider("Temperature", min_value=0.0, max_value=1.0, value=0.9, step=0.05)
+    max_new_tokensSide = st.slider("Max new tokens", min_value=0.0, max_value=4096.0, value=4096.0, step=64.0)
+    ToppSide = st.slider("Top-p (nucleus sampling)", min_value=0.0, max_value=1.0, value=0.6, step=0.05)
+    RepetitionpenaltySide = st.slider("Repetition penalty", min_value=0.0, max_value=2.0, value=1.2, step=0.05)
 
 
 
@@ -59,7 +59,7 @@ if prompt := st.chat_input("Ask LLama-2-70b anything..."):
     # Add user message to chat history
    st.session_state.messages.append({"role": "human", "content": prompt})
 
-    response = predict(prompt)#,system_promptSide,temperatureSide,max_new_tokensSide,ToppSide,RepetitionpenaltySide)
+    response = predict(prompt, system_promptSide,temperatureSide,max_new_tokensSide,ToppSide,RepetitionpenaltySide)
     # Display assistant response in chat message container
     with st.chat_message("assistant", avatar='🦙'):
        st.markdown(response)
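
The commit changes the call site only; the body of predict() is defined elsewhere in app.py and is not part of this diff. As an illustration of how a predict() with this argument order could forward the sidebar values, a minimal sketch follows. The use of huggingface_hub's InferenceClient, the model id, and the prompt template are assumptions for the example, not taken from this commit.

    # Illustrative sketch, not the code from this commit: a predict() whose
    # signature matches the new call site. The InferenceClient, model id, and
    # prompt template are assumptions for the example.
    from huggingface_hub import InferenceClient

    client = InferenceClient("meta-llama/Llama-2-70b-chat-hf")  # assumed endpoint

    def predict(message, system_prompt="", temperature=0.9,
                max_new_tokens=4096, top_p=0.6, repetition_penalty=1.2):
        # Llama-2 chat prompt format; the system block stays empty if no prompt is given.
        prompt = f"<s>[INST] <<SYS>>\n{system_prompt}\n<</SYS>>\n\n{message} [/INST]"
        return client.text_generation(
            prompt,
            temperature=temperature,
            max_new_tokens=int(max_new_tokens),  # the sliders above are defined on floats
            top_p=top_p,
            repetition_penalty=repetition_penalty,
        )

One consequence of the float-valued sliders is that max_new_tokensSide arrives as a float (e.g. 4096.0), so predict() needs an int cast before the request; the 0.0 minimum on the temperature and top-p sliders can also produce values that some inference backends reject.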