prithivMLmods committed on
Commit
bb80c1f
β€’
1 Parent(s): 66ef9bd

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +72 -79
app.py CHANGED
@@ -1,25 +1,59 @@
 
1
  import gradio as gr
2
- from openai import OpenAI
3
- import os
4
  from fpdf import FPDF
5
  import docx
6
 
7
  css = '''
8
- .gradio-container{max-width: 890px !important}
9
  h1{text-align:center}
10
  footer {
11
  visibility: hidden
12
  }
13
  '''
14
 
15
- ACCESS_TOKEN = os.getenv("HF_TOKEN")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
16
 
17
- client = OpenAI(
18
- base_url="https://api-inference.huggingface.co/v1/",
19
- api_key=ACCESS_TOKEN,
20
- )
 
 
 
 
 
 
21
 
22
- # Function to save generated text to a file
 
 
 
 
 
 
 
 
 
 
23
  def save_file(content, filename, file_format):
24
  if file_format == "pdf":
25
  pdf = FPDF()
@@ -42,74 +76,33 @@ def save_file(content, filename, file_format):
42
  else:
43
  raise ValueError("Unsupported file format")
44
 
45
- # Respond function with file saving
46
- def respond(
47
- message,
48
- history: list[tuple[str, str]],
49
- system_message,
50
- max_tokens,
51
- temperature,
52
- top_p,
53
- ):
54
- messages = [{"role": "system", "content": system_message}]
55
-
56
- for val in history:
57
- if val[0]:
58
- messages.append({"role": "user", "content": val[0]})
59
- if val[1]:
60
- messages.append({"role": "assistant", "content": val[1]})
61
-
62
- messages.append({"role": "user", "content": message})
63
-
64
- response = ""
65
-
66
- for message in client.chat.completions.create(
67
- model="meta-llama/Meta-Llama-3.1-70B-Instruct",
68
- max_tokens=max_tokens,
69
- stream=True,
70
- temperature=temperature,
71
- top_p=top_p,
72
- messages=messages,
73
- ):
74
- token = message.choices[0].delta.content
75
- response += token
76
- yield response
77
-
78
- return response, history + [(message, response)]
79
-
80
- # Function to handle file saving after generation
81
- def save_generated_file(response, filename, file_format):
82
- saved_file = save_file(response, filename, file_format)
83
- return saved_file
84
-
85
- # Gradio interface using Blocks
86
- with gr.Blocks(css=css) as demo:
87
- system_message = gr.Textbox(value="", label="System message")
88
- max_tokens = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens")
89
- temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
90
- top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-P")
91
- filename = gr.Textbox(value="output", label="Filename")
92
- file_format = gr.Radio(["pdf", "docx", "txt"], label="File Format", value="pdf")
93
-
94
- message = gr.Textbox(label="User Message")
95
- chat_history = gr.State(value=[])
96
- response_output = gr.Textbox(label="Generated Response")
97
- file_output = gr.File(label="Download File")
98
-
99
- generate_button = gr.Button("Generate")
100
- save_button = gr.Button("Save to File")
101
-
102
- generate_button.click(
103
- respond,
104
- inputs=[message, chat_history, system_message, max_tokens, temperature, top_p],
105
- outputs=[response_output, chat_history]
106
- )
107
-
108
- save_button.click(
109
- save_generated_file,
110
- inputs=[response_output, filename, file_format],
111
- outputs=[file_output]
112
- )
113
 
114
- if __name__ == "__main__":
115
- demo.launch()
 
1
+ from huggingface_hub import InferenceClient
2
  import gradio as gr
 
 
3
  from fpdf import FPDF
4
  import docx
5
 
6
# Page styling: widen the app container, center the h1 title, and hide
# the default Gradio footer.
css = '''
.gradio-container{max-width: 1000px !important}
h1{text-align:center}
footer {
    visibility: hidden
}
'''

# Client for the Hugging Face Inference API, pointed at the
# instruction-tuned Mistral 7B model used for all generations below.
client = InferenceClient("mistralai/Mistral-7B-Instruct-v0.3")
16
def format_prompt(message, history, system_prompt=None):
    """Assemble a Mistral-style instruction prompt from a conversation.

    Each ``(user, assistant)`` pair in *history* becomes an
    ``[INST] user [/INST] assistant</s>`` segment. If *system_prompt* is
    non-empty it is appended inside ``[SYS]...[/SYS]`` just before the
    final ``[INST] message [/INST]`` turn. Returns the full prompt string,
    which always starts with the ``<s>`` beginning-of-sequence marker.
    """
    segments = ["<s>"]
    for past_user, past_bot in history:
        segments.append(f"[INST] {past_user} [/INST]")
        segments.append(f" {past_bot}</s> ")
    if system_prompt:
        segments.append(f"[SYS] {system_prompt} [/SYS]")
    segments.append(f"[INST] {message} [/INST]")
    return "".join(segments)
25
+
26
# Stream text from the remote model.
def generate(
    prompt, history, system_prompt=None, temperature=0.2, max_new_tokens=1024, top_p=0.95, repetition_penalty=1.0,
):
    """Stream a completion for *prompt* from the Inference API.

    The conversation *history* and optional *system_prompt* are folded
    into one formatted prompt via ``format_prompt``; the sampling knobs
    are forwarded to ``client.text_generation``. Yields the accumulated
    output after every received token, with any ``</s>`` end-of-sequence
    markers stripped out, and finally returns the full text.
    """
    # Keep temperature strictly positive: floor it at 1e-2.
    temperature = max(float(temperature), 1e-2)
    top_p = float(top_p)

    full_prompt = format_prompt(prompt, history, system_prompt)

    token_stream = client.text_generation(
        full_prompt,
        temperature=temperature,
        max_new_tokens=max_new_tokens,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
        do_sample=True,
        seed=42,  # fixed seed for reproducible sampling
        stream=True,
        details=True,
        return_full_text=False,
    )

    text_so_far = ""
    for chunk in token_stream:
        # Append the new token, then scrub any </s> markers before yielding.
        text_so_far = (text_so_far + chunk.token.text).replace("</s>", "")
        yield text_so_far
    return text_so_far
55
+
56
+ # Save the generated content to a file
57
  def save_file(content, filename, file_format):
58
  if file_format == "pdf":
59
  pdf = FPDF()
 
76
  else:
77
  raise ValueError("Unsupported file format")
78
 
79
+ # Combine generate and save file functions
80
+ def generate_and_save(prompt, history, filename="output", file_format="pdf", system_prompt=None, temperature=0.2, max_new_tokens=1024, top_p=0.95, repetition_penalty=1.0):
81
+ generated_text = ""
82
+ for output in generate(prompt, history, system_prompt, temperature, max_new_tokens, top_p, repetition_penalty):
83
+ generated_text = output
84
+ # Ensure </s> tags are removed from the final output
85
+ generated_text = generated_text.replace("</s>", "")
86
+ saved_file = save_file(generated_text, filename, file_format)
87
+ return generated_text, history + [(prompt, generated_text)], saved_file
88
+
89
+ # Create Gradio interface
90
+ demo = gr.Interface(
91
+ fn=generate_and_save,
92
+ inputs=[
93
+ gr.Textbox(placeholder="Type your message here...", label="Prompt"),
94
+ gr.State(value=[]), # history
95
+ gr.Textbox(placeholder="Filename (default: output)", label="Filename", value="output"),
96
+ gr.Radio(["pdf", "docx", "txt"], label="File Format", value="pdf"),
97
+ ],
98
+ outputs=[
99
+ gr.Textbox(label="Generated Text"),
100
+ gr.State(value=[]), # history
101
+ gr.File(label="Download File")
102
+ ],
103
+ css=css,
104
+ title="",
105
+ theme="bethecloud/storj_theme"
106
+ )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
107
 
108
+ demo.queue().launch(show_api=False)