import streamlit as st
import pandas as pd
from transformers import pipeline
# Load the anomalies dataset that the table-QA pipeline will answer questions
# against. sep/decimal are the pandas defaults, kept explicit for clarity.
df = pd.read_csv("anomalies.csv", sep=",", decimal=".")
# Function to generate a response
@st.cache_resource
def _load_tqa():
    """Load the TAPAS table-question-answering pipeline once.

    The model is large; caching it as a Streamlit resource avoids
    re-downloading/re-instantiating it on every script rerun and
    on every question.
    """
    return pipeline(task="table-question-answering", model="google/tapas-large-finetuned-wtq")


def response(question):
    """Answer a natural-language question against the anomalies DataFrame.

    Args:
        question: The user's question as a string.

    Returns:
        The first answer cell produced by the TAPAS pipeline.

    Raises:
        TypeError: If ``question`` is not a string.
    """
    # Bug fix: the original reassigned `question = user_question` from a
    # global, discarding the argument — validate the actual parameter.
    if not isinstance(question, str):
        raise TypeError(f"Esperado uma string para a pergunta, mas recebeu {type(question)}")
    tqa = _load_tqa()
    # Bug fix: the original assigned `final_rresposta` but returned the
    # undefined name `final_response` (NameError on every call).
    final_response = tqa(table=df, query=question)['cells'][0]
    return final_response
# Streamlit interface
# Placeholder for page-level custom HTML/CSS (currently empty).
st.markdown("""
""", unsafe_allow_html=True)

# Chat history: persist (sender, message) tuples across Streamlit reruns.
if 'history' not in st.session_state:
    st.session_state['history'] = []

# Input box for user question
user_question = st.text_input("Escreva sua questão aqui:", "")

if user_question:
    # Add person emoji when typing question
    st.session_state['history'].append(('👤', user_question))
    st.markdown(f"**👤 {user_question}**")
    # Generate the response
    bot_response = response(user_question)
    # Add robot emoji when generating response and align to the right.
    # Bug fix: the original f-string was broken across two physical lines
    # (a syntax error); the intended trailing newline is written as \n.
    st.session_state['history'].append(('🤖', bot_response))
    st.markdown(f"**🤖 {bot_response}**\n", unsafe_allow_html=True)

# Clear history button
if st.button("Limpar"):
    st.session_state['history'] = []
# Display chat history: replay every stored (sender, message) turn, keeping
# the emoji formatting consistent with how messages were first rendered.
for sender, message in st.session_state['history']:
    if sender == '👤':
        st.markdown(f"**👤 {message}**")
    elif sender == '🤖':
        # Bug fix: the original f-string was split across two physical lines
        # (a syntax error); the intended trailing newline is written as \n.
        st.markdown(f"**🤖 {message}**\n", unsafe_allow_html=True)