import gradio as gr
import json
from datetime import datetime
from theme import TufteInspired
import glob
import os
import uuid
from pathlib import Path
import spaces
import torch
import transformers
from huggingface_hub import CommitScheduler, hf_hub_download, login
from transformers import AutoTokenizer, AutoModelForCausalLM
from outlines import models, generate
from gradio import update

model_id = "meta-llama/Meta-Llama-3-8B-Instruct"
tokenizer = AutoTokenizer.from_pretrained(model_id, add_special_tokens=True)


# Generate a blurb with outlines; the model is loaded inside the function so it
# runs within the GPU allocation granted by the @spaces.GPU decorator
@spaces.GPU(duration=120)
def generate_blurb():
    model = models.transformers(model_id)
    generator = generate.text(model)
    resp = generator("Write a blurb for a book")
    return resp


# Append the blurb and the vote to a local JSONL log file
def log_blurb_and_vote(blurb, vote):
    log_entry = {"timestamp": datetime.now().isoformat(), "blurb": blurb, "vote": vote}
    with open("blurb_log.jsonl", "a") as f:
        f.write(json.dumps(log_entry) + "\n")
    return f"Logged: {vote}"


# Create custom theme
tufte_theme = TufteInspired()

# Create Gradio interface
with gr.Blocks(theme=tufte_theme) as demo:
    gr.Markdown("# Would you read it?")
    gr.Markdown(
        "Click the button to generate a blurb for a made-up book, then vote on its quality."
    )
    with gr.Row():
        generate_btn = gr.Button("Write a Blurb", variant="primary")
    blurb_output = gr.Textbox(label="Generated Blurb", lines=5, interactive=False)
    with gr.Row():
        upvote_btn = gr.Button("👍 would read")
        downvote_btn = gr.Button("👎 wouldn't read")
    vote_output = gr.Textbox(label="Vote Status", interactive=False)

    # Wire up the buttons: generate a blurb, then record up/down votes on it
    generate_btn.click(generate_blurb, outputs=blurb_output)
    upvote_btn.click(
        lambda x: log_blurb_and_vote(x, "upvote"),
        inputs=blurb_output,
        outputs=vote_output,
    )
    downvote_btn.click(
        lambda x: log_blurb_and_vote(x, "downvote"),
        inputs=blurb_output,
        outputs=vote_output,
    )

if __name__ == "__main__":
    demo.launch()