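# app.py for the Donald Trump Chatbot Space: a Gradio chat UI on top of
# nawhgnuj/DonaldTrump-Llama-3.1-8B-Chat, loaded in 4-bit and served through
# the ZeroGPU @spaces.GPU decorator.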
import os
import time
import spaces
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer, BitsAndBytesConfig
import gradio as gr
from threading import Thread

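# Model selection and auth: MODEL_ID and HF_TOKEN can be overridden via environment variables.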
MODEL_LIST = ["nawhgnuj/DonaldTrump-Llama-3.1-8B-Chat"]
HF_TOKEN = os.environ.get("HF_TOKEN", None)
MODEL = os.environ.get("MODEL_ID", "nawhgnuj/DonaldTrump-Llama-3.1-8B-Chat")

TITLE = "<h1 style='color: #B71C1C; text-align: center;'>Donald Trump Chatbot</h1>"

TRUMP_AVATAR = "https://upload.wikimedia.org/wikipedia/commons/5/56/Donald_Trump_official_portrait.jpg"

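# Custom styling: white chat background with a dark red (#B71C1C) accent for bot messages and buttons.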
CSS = """
.chatbot {
    background-color: white;
}
.duplicate-button {
    margin: auto !important;
    color: white !important;
    background: #B71C1C !important;
    border-radius: 100vh !important;
}
h3 {
    text-align: center;
    color: #B71C1C;
}
.contain {object-fit: contain}
.avatar {width: 80px; height: 80px; border-radius: 50%; object-fit: cover;}
.user-message {
    background-color: white !important;
    color: black !important;
}
.bot-message {
    background-color: #B71C1C !important;
    color: white !important;
}
"""

device = "cuda" if torch.cuda.is_available() else "cpu"

# Load the model in 4-bit NF4 with bfloat16 compute so it fits on a single GPU.
quantization_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_compute_dtype=torch.bfloat16,
    bnb_4bit_use_double_quant=True,
    bnb_4bit_quant_type="nf4",
)

# Pass HF_TOKEN (if set) so gated or private model downloads can authenticate.
tokenizer = AutoTokenizer.from_pretrained(MODEL, token=HF_TOKEN)
if tokenizer.pad_token is None:
    # Fall back to the EOS token when the tokenizer defines no pad token.
    tokenizer.pad_token = tokenizer.eos_token
    tokenizer.pad_token_id = tokenizer.eos_token_id

model = AutoModelForCausalLM.from_pretrained(
    MODEL,
    torch_dtype=torch.bfloat16,
    device_map="auto",
    quantization_config=quantization_config,
    token=HF_TOKEN,
)

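# Each generation call requests a GPU slot through the @spaces.GPU decorator.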
@spaces.GPU()
def stream_chat(
    message: str, 
    history: list,
):
    system_prompt = """You are a Donald Trump chatbot. Answer only as Trump, in his style and tone, reflecting his distinctive speech patterns. Incorporate the following characteristics in every response:
    1. Repeat key phrases for emphasis, use strong superlatives like 'tremendous' and 'fantastic', and attack opponents where appropriate (e.g., 'fake news media,' 'radical left').
    2. Focus on personal successes ('nobody's done more than I have').
    3. Keep sentences short and impactful, and show national pride.
    4. Maintain a direct, informal tone, often addressing the audience as 'folks', and dismiss opposing views bluntly.
    Always respond to every point in Trump's style. Keep responses concise and avoid excessive repetition.
    """

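    # Rebuild the full conversation: system prompt, prior turns, then the new user message.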
    conversation = [
        {"role": "system", "content": system_prompt}
    ]
    for prompt, answer in history:
        conversation.extend([
            {"role": "user", "content": prompt},
            {"role": "assistant", "content": answer},
        ])
    conversation.append({"role": "user", "content": message})
    
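    # Apply the model's chat template and build a matching attention mask.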
    input_ids = tokenizer.apply_chat_template(conversation, add_generation_prompt=True, return_tensors="pt").to(model.device)
    attention_mask = torch.ones_like(input_ids)
    
    # Stream tokens as they are generated: run generate() in a background thread
    # and read incrementally decoded text from the TextIteratorStreamer.
    streamer = TextIteratorStreamer(tokenizer, timeout=60.0, skip_prompt=True, skip_special_tokens=True)

    generate_kwargs = dict(
        input_ids=input_ids,
        attention_mask=attention_mask,
        streamer=streamer,
        max_new_tokens=1024,
        do_sample=True,
        top_p=1.0,
        top_k=20,
        temperature=0.8,
        pad_token_id=tokenizer.pad_token_id,
        eos_token_id=tokenizer.eos_token_id,
    )

    thread = Thread(target=model.generate, kwargs=generate_kwargs)
    thread.start()

    # Yield the accumulated partial response so the UI can update as text arrives.
    partial_response = ""
    for new_text in streamer:
        partial_response += new_text
        yield partial_response

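# Chat plumbing: add_text records the user turn; bot streams the model's reply into it.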
def add_text(history, text):
    history = history + [(text, None)]
    return history, ""

def bot(history):
    # Stream the assistant's reply into the last chat entry, updating the UI as
    # each partial response arrives.
    user_message = history[-1][0]
    history[-1][1] = ""
    for partial_response in stream_chat(user_message, history[:-1]):
        history[-1][1] = partial_response
        yield history

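# Assemble the Gradio UI: themed chatbot panel, input box, submit/clear buttons, and example prompts.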
with gr.Blocks(css=CSS, theme=gr.themes.Default()) as demo:
    gr.HTML(TITLE)
    chatbot = gr.Chatbot(
        [],
        elem_id="chatbot",
        avatar_images=(None, TRUMP_AVATAR),
        height=600,
        bubble_full_width=False,
        show_label=False,
    )
    msg = gr.Textbox(
        placeholder="Ask Donald Trump a question",
        container=False,
        scale=7
    )
    with gr.Row():
        submit = gr.Button("Submit", scale=1, variant="primary")
        clear = gr.Button("Clear", scale=1)

    gr.Examples(
        examples=[
            ["What's your stance on immigration?"],
            ["How would you describe your economic policies?"],
            ["What are your thoughts on the media?"],
        ],
        inputs=msg,
    )

    submit.click(add_text, [chatbot, msg], [chatbot, msg], queue=False).then(
        bot, chatbot, chatbot
    )
    clear.click(lambda: [], outputs=[chatbot], queue=False)
    msg.submit(add_text, [chatbot, msg], [chatbot, msg], queue=False).then(
        bot, chatbot, chatbot
    )

if __name__ == "__main__":
    demo.launch()