import gradio as gr
from transformers import pipeline

# Function to generate the story
def generate_story(title, model_name):
    # Load a text-generation pipeline from Hugging Face
    # (note: this reloads the model on every call; caching the pipeline per model name would speed up repeated runs)
    generator = pipeline('text-generation', model=model_name)
    # Generate the story based on the input title
    story = generator(title,
                      max_length=230,          # Cap the total length (prompt + story) at 230 tokens
                      no_repeat_ngram_size=3,  # Avoid repeating any 3-token sequence (to prevent repetitive text)
                      do_sample=True,          # Enable sampling; without it, temperature and top_p are ignored
                      temperature=0.8,         # Higher values make the output more random, lower more deterministic
                      top_p=0.95               # Nucleus sampling: draw only from the top 95% of the probability mass
                      )
    # Return the generated text
    return story[0]['generated_text']

# Create the Gradio interface using gr.Interface
demo = gr.Interface(
    fn=generate_story,  # The function to run
    inputs=[             # Inputs for the interface
        gr.Textbox(label="Enter Story Title", placeholder="Type a title here..."),  # Title input
        gr.Dropdown(choices=['gpt2', 'gpt2-large', 'EleutherAI/gpt-neo-2.7B', 'EleutherAI/gpt-j-6B',
                             'maldv/badger-writer-llama-3-8b', 'EleutherAI/gpt-neo-1.3B'],
                    value='gpt2',
                    label="Choose Model")  # Model selection input
    ],
    outputs=gr.Textbox(label="Generated Story", lines=10),  # Output for the generated story
    title="AI Story Generator",  # Title of the interface
    description="Enter a title and choose a model to generate a short story"  # A short description
)

# Launch the interface
demo.launch(share=True)
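
# Minimal usage sketch (assumptions: this file is saved as app.py, and gradio, transformers, and torch are installed):
#   pip install gradio transformers torch
#   python app.py
# Gradio then serves the interface locally and, because share=True, also prints a temporary public link.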