File size: 4,184 Bytes
888b8af
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
97de3aa
888b8af
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c54a918
 
4e10583
888b8af
c54a918
888b8af
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
50d69b4
888b8af
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
# Importing Necessary Libraries
import gradio as gr
from llama_index import download_loader, ServiceContext, VectorStoreIndex
from llama_index.embeddings import HuggingFaceEmbedding
from llama_index import Prompt
import torch
device = torch.device("cpu")  # CPU-only; matches n_gpu_layers=0 below

# Loading the Zephyr Model using Llama CPP
from llama_index.llms import LlamaCPP
llm = LlamaCPP(
    # Quantized Zephyr-7B GGUF weights are downloaded on first run (several GB);
    # model_path=None tells LlamaCPP to fetch from model_url instead of disk.
    model_url='https://huggingface.co/TheBloke/zephyr-7B-beta-GGUF/resolve/main/zephyr-7b-beta.Q5_K_M.gguf?download=true',
    model_path=None,
    temperature=0.5,
    max_new_tokens=2000,
    # context_window is kept below the model's full context to leave headroom
    # for the prompt template and generated tokens.
    context_window=3900,
    # set to at least 1 to use GPU
    model_kwargs={"n_gpu_layers": 0}
)

# Loading Embedding Model
embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-base-en-v1.5")

# Defining custom Prompt
# Zephyr uses <|user|>/<|assistant|> chat markers; {context} and {query_str}
# are filled in by LlamaIndex at query time.
TEMPLATE_STR = (
    '''You are an helpful and responsible AI assistant who is excited to help user and answer the question politely but will never harm humans or engage in the activity that causes harm to anyone. Use the given context below if useful.
{context}
<|user|>\n
{query_str}\n
<|assistant|>\n'''
)
QA_TEMPLATE = Prompt(TEMPLATE_STR)

# User Interface functions
def build_the_bot(file):
    """Index an uploaded Excel file so chat() can answer queries about it.

    Args:
        file: Value from gr.File — a filepath string when type="filepath"
              (Gradio 4), or a tempfile-like object with a ``.name`` attribute
              in older Gradio versions; may be None when the upload is cleared.

    Returns:
        A 4-tuple of component updates for the ``upload.change`` outputs:
        (question, clear_button, submit_button, chatbot).
    """
    global service_context, index
    # Gradio may deliver either a plain path string or an object exposing the
    # path via .name — the original `file.name` crashed on the string case.
    path = file if isinstance(file, str) else getattr(file, "name", "")
    # Case-insensitive check so ".XLSX" uploads are accepted too.
    if file is not None and path.lower().endswith(".xlsx"):
        # Loading Data
        PandasExcelReader = download_loader("PandasExcelReader")
        loader = PandasExcelReader(pandas_config={"header": 0})
        documents = loader.load_data(file=file)

        service_context = ServiceContext.from_defaults(
            chunk_size=150, chunk_overlap=10,
            llm=llm, embed_model=embed_model,
        )

        # NOTE(review): text_qa_template is normally an as_query_engine()
        # argument; confirm it is actually honored when passed here.
        index = VectorStoreIndex.from_documents(
            documents, service_context=service_context, text_qa_template=QA_TEMPLATE
        )

        # Four updates because upload.change declares four outputs.
        return (gr.update(visible=True), gr.update(visible=True),
                gr.update(visible=True), gr.update(visible=True))
    else:
        # No file (or wrong type): prompt the user and hide the submit button.
        return (gr.Textbox(placeholder="Please upload an excel file, refresh the page to restart the app"),
                gr.update(visible=True), gr.update(visible=False), gr.update(visible=True))

def chat(user_input, history):
    """Answer a user query against the indexed spreadsheet.

    Args:
        user_input: Text typed by the user; blank or whitespace-only input
                    is rejected without calling the LLM.
        history: List of (user, bot) message pairs held by the Chatbot.

    Returns:
        (textbox_value, history): an empty string on success (clears the
        input box), or a prompt asking for a real query when input is blank.
    """
    # Guard clause: the original `== ""` check let whitespace-only queries
    # through to the LLM; strip() catches those too.
    if not user_input.strip():
        return "Please write your query so that I can assist you even better.", history
    # `index` is created at module level by build_the_bot() after an upload;
    # it is only read here, so no `global` declaration is needed.
    query_engine = index.as_query_engine(streaming=False)
    bot_response = str(query_engine.query(user_input))
    history.append((user_input, bot_response))
    return "", history

def clear_everything():
    """Reset the UI: one None per output component (upload, question, chatbot)."""
    return None, None, None

# Adding themes in UI Interface
custom_theme = gr.themes.Monochrome()

# UI Design and Logic
# Layout: a narrow left column (file upload + clear button) and a wide right
# column (chatbot, query box, submit button).
with gr.Blocks(theme=custom_theme,title="Marketing Email Generator") as demo:
    gr.HTML("<h1 style='text-align: center;'>Marketing Email Generator</h1>")
    gr.Markdown("Drop you Excel file here 👇 and ask your query about it!")
    with gr.Row():
        with gr.Column(scale=3):
            upload = gr.File(label="Upload Your Excel File only", type="filepath")
            with gr.Row():
                clear_button = gr.Button("Clear", variant="secondary")

        with gr.Column(scale=6):
            chatbot = gr.Chatbot()
            with gr.Row():
                with gr.Column(scale=8):
                    question = gr.Textbox(
                        show_label=False,
                        placeholder="Type your query here after uploading the excel file...",
                    )
                with gr.Column(scale=1, min_width=60):
                    submit_button = gr.Button("Ask me 🤖", variant="primary")

    # Uploading a file (re)builds the index; the four outputs mirror the
    # 4-tuple returned by build_the_bot.
    upload.change(fn=build_the_bot,
                  inputs=[upload],
                  outputs=[question,clear_button,submit_button,chatbot],
                  api_name="upload")

    # Both Enter-in-textbox and the button route through chat(); the first
    # output clears (or replaces) the textbox, the second updates the history.
    question.submit(chat, [question, chatbot], [question, chatbot])
    submit_button.click(chat, [question, chatbot], [question, chatbot])

    clear_button.click(fn=clear_everything,inputs=[],
                       outputs=[upload, question, chatbot],
                       api_name="clear")

if __name__ == "__main__":
    # share=True exposes a public Gradio link; debug=True keeps the process
    # in the foreground and prints tracebacks.
    demo.launch(share=True, debug=True)