# --- Hugging Face Spaces page residue (not code), preserved as comments: ---
# Spaces:
# Runtime error
# Runtime error
# File size: 1,610 Bytes
# 8e8e715 0a05aa7 |
# 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 |
# Importing the library
import gradio as gr
from transformers import AutoModelForSequenceClassification, AutoTokenizer
import torch
from huggingface_hub import notebook_login
# Pull the fine-tuned sentiment model and its matching tokenizer from the Hub.
model_name = "Shiko07/tuned_test_trainer-bert-base-uncased"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name)
# Defining the Gradio inference function:
def predict_sentiment(text):
    """Classify *text* and return its sentiment label.

    Args:
        text: Raw input string from the UI textbox.

    Returns:
        One of ``"Negative"``, ``"Neutral"`` or ``"Positive"``, chosen by the
        argmax over the model's three logits.
    """
    # Truncate so inputs longer than the model's max sequence length don't
    # raise at inference time instead of being scored.
    inputs = tokenizer(text, return_tensors="pt", truncation=True)
    # Inference only: skip autograd graph construction to save memory/time.
    with torch.no_grad():
        outputs = model(**inputs)
    predicted_class = torch.argmax(outputs.logits, dim=1).item()
    # Class index -> human-readable label (3-class model).
    return {0: "Negative", 1: "Neutral", 2: "Positive"}[predicted_class]
# Creating Gradio interface:
# Gradio wraps the whole app in an element with the ``gradio-container`` class;
# the original ``.gradio`` selector matched nothing, so the background color
# was never applied.
custom_css = """
.gradio-container {
    background-color: #0074D9; /* Change background color to blue */
}
"""
# Wire predict_sentiment into a Gradio UI and start the app.
# (Removed the dead, commented-out ``theme`` kwarg; normalized keyword-argument
# spacing per PEP 8.)
interface = gr.Interface(
    fn=predict_sentiment,
    inputs=gr.Textbox(lines=3, label="Enter your text:"),
    outputs="text",
    title="Marrakech Sentiment Analysis App",
    description="An app for sentiment analysis for Tweet posts on covid 19 vaccine.",
    css=custom_css,
    # Clickable sample inputs shown below the textbox.
    examples=[
        ["Vaccine misinformation is harmful."],
        ["I'm hopeful about the vaccine."],
        ["Second dose excitement."],
        ["I'm worried about vaccine side effects."],
        ["Vaccine distribution updates are available."],
        ["Vaccine distribution is too slow."],
        ["I'm gathering information about the vaccine."],
    ],
)
interface.launch()