NeuraFusionAI committed on
Commit
7d55d92
1 Parent(s): 19e98cb

Initial commit with chatbot script and requirements

building_chatbot_ui_with_microsoft_godel_&_gradio.py ADDED
@@ -0,0 +1,74 @@
+ # -*- coding: utf-8 -*-
+ """Building a Chatbot UI with Microsoft GODEL & Gradio
+ 
+ GODEL (Grounded Open Dialogue Language Model):
+ https://www.microsoft.com/en-us/research/uploads/prod/2022/05/2206.11309.pdf
+ """
+ 
+ !pip install transformers gradio -q
+ !pip install huggingface_hub
+ 
+ from huggingface_hub import notebook_login
+ 
+ # Log in to Hugging Face
+ notebook_login()
+ 
+ """# Step 1 — Setting up the Chatbot Model - Microsoft GODEL"""
+ 
+ from transformers import AutoModelForSeq2SeqLM, AutoTokenizer
+ import torch
+ 
+ # Load the GODEL v1.1 base seq2seq checkpoint and its tokenizer
+ tokenizer = AutoTokenizer.from_pretrained("microsoft/GODEL-v1_1-base-seq2seq")
+ model = AutoModelForSeq2SeqLM.from_pretrained("microsoft/GODEL-v1_1-base-seq2seq")
+ 
+ """# Step 2 — Defining a `predict` function with `state` and model prediction"""
+ 
+ def predict(input, history=[]):
+     instruction = 'Instruction: given a dialog context, you need to response empathically'
+     knowledge = ' '
+ 
+     # Flatten the (user, bot) pairs in history into a flat list of turns,
+     # then append the new user message
+     s = list(sum(history, ()))
+     s.append(input)
+ 
+     # GODEL expects dialogue turns joined with the ' EOS ' separator
+     dialog = ' EOS '.join(s)
+     query = f"{instruction} [CONTEXT] {dialog} {knowledge}"
+ 
+     top_p = 0.9
+     min_length = 8
+     max_length = 64
+ 
+     # Tokenize the full prompt (instruction + dialogue context + knowledge)
+     new_user_input_ids = tokenizer.encode(query, return_tensors='pt')
+ 
+     # Generate a response with nucleus sampling
+     output = model.generate(new_user_input_ids, min_length=min_length,
+                             max_length=max_length, top_p=top_p,
+                             do_sample=True).tolist()
+     response = tokenizer.decode(output[0], skip_special_tokens=True)
+ 
+     history.append((input, response))
+     return history, history
+ 
+ """# Step 3 — Creating a Gradio Chatbot UI"""
+ 
+ import gradio as gr
+ 
+ # The 'state' input/output carries the conversation history between calls;
+ # the 'chatbot' output renders the list of (user, bot) message pairs.
+ gr.Interface(fn=predict,
+              inputs=["text", 'state'],
+              outputs=["chatbot", 'state']).launch(debug=True, share=True)
+ 
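For reference, here is a short sketch of how the `predict` function behaves when called outside the Gradio UI. It is not part of the committed script; it assumes the `tokenizer` and `model` from Step 1 are already loaded in the session.

```python
# Editorial sketch (not in the committed script): calling predict() directly.
# Assumes the GODEL tokenizer and model from Step 1 are loaded.
history = []

# First turn: history is empty, so the GODEL query is just the instruction
# plus "[CONTEXT] <user message>".
history, _ = predict("Hi, I had a rough day at work.", history)
print(history[-1][1])   # GODEL's empathetic reply

# Second turn: the previous (user, bot) pair is flattened and joined with
# ' EOS ' before the new message, exactly as Gradio's 'state' would carry it.
history, _ = predict("Thanks, that helps a little.", history)
print(history)          # list of (user, bot) tuples shown by the 'chatbot' output
```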
readme.md ADDED
@@ -0,0 +1,16 @@
+ # Building a Chatbot UI with Microsoft GODEL and Gradio
+ 
+ This project demonstrates how to create a conversational AI chatbot using Microsoft's GODEL model and deploy it with a Gradio UI. The chatbot is designed to respond empathetically in a dialogue context.
+ 
+ ## Features
+ 
+ - **Empathetic Dialogues**: Utilizes Microsoft's GODEL model to generate empathetic responses.
+ - **Gradio UI**: Simple and interactive web interface for the chatbot.
+ - **Hugging Face Integration**: Easily deploy and manage your model using Hugging Face's platform.
+ 
+ ## Installation
+ 
+ To run this project, you'll need to have Python installed. You can install the required packages using the following command:
+ 
+ ```bash
+ pip install -r requirements.txt
+ ```
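After the install step above, one way to check that the pinned packages and the GODEL checkpoint work together is to query the model directly from Python, without the notebook login or the Gradio UI. This is an editorial sketch mirroring Steps 1 and 2 of the committed script, not part of the README itself.

```python
# Editorial sketch (not part of the committed files): a minimal install check
# that loads GODEL and generates one empathetic reply for a single-turn context.
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("microsoft/GODEL-v1_1-base-seq2seq")
model = AutoModelForSeq2SeqLM.from_pretrained("microsoft/GODEL-v1_1-base-seq2seq")

# Same prompt format as the script: instruction, [CONTEXT] marker, dialogue turns.
instruction = "Instruction: given a dialog context, you need to response empathically"
query = f"{instruction} [CONTEXT] I failed my driving test today. "

input_ids = tokenizer.encode(query, return_tensors="pt")
output = model.generate(input_ids, min_length=8, max_length=64,
                        top_p=0.9, do_sample=True)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```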
requirements.txt ADDED
@@ -0,0 +1,4 @@
+ transformers==4.33.2
+ gradio==3.30.0
+ huggingface_hub==0.17.1
+ torch>=1.13.1