from transformers import pipeline
import gradio as gr
from PIL import Image
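
# Load the BLIP image-captioning pipeline (the model weights are downloaded on first run)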
pipe = pipeline("image-to-text", model="Salesforce/blip-image-captioning-large")


def generate_caption(image):
    # Run the captioning pipeline; it returns a list of dicts with a 'generated_text' key
    captions = pipe(image)
    return captions[0]['generated_text']
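

# Build a simple Gradio interface: image upload in, generated caption out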
demo = gr.Interface(
    fn=generate_caption,
    inputs=gr.Image(type="pil", label="Upload an Image"),
    outputs=gr.Textbox(label="Generated Caption"),
title="Image Caption Generator",
    description="Upload an image to generate a caption using the BLIP model."
)
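
# Launch the app; share=True also creates a temporary public Gradio link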
if __name__ == "__main__":
    demo.launch(share=True)