# Metadatagen / app.py
import streamlit as st
import openai
import base64
import requests
import os
import re
import pandas as pd
import random
import json
import subprocess
import sys  # used by read_image() for the PyInstaller _MEIPASS check
# Function to save API key and image directory to a file
def save_settings(api_key, image_directory):
with open('settings.txt', 'w') as f:
f.write(f"{api_key}\n{image_directory}")
# Function to load API key and image directory from a file
def load_settings():
try:
with open('settings.txt', 'r') as f:
lines = f.readlines()
api_key = lines[0].strip() if len(lines) > 0 else ''
image_directory = lines[1].strip() if len(lines) > 1 else ''
return api_key, image_directory
except FileNotFoundError:
return '', ''
# Function to initialize OpenAI API key
def init_openai_api_key(api_key):
openai.api_key = api_key
# Function to call GPT API
def call_gpt_api(prompt, temperature=0.7, max_tokens=None):
params = {
"model": "gpt-3.5-turbo-0301",
"messages": [
{"role": "system", "content": "You are a helpful assistant that generates metadata."},
{"role": "user", "content": prompt}
],
"temperature": temperature
}
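    # Only include max_tokens when the caller supplies one; otherwise the API default applies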
if max_tokens is not None:
params["max_tokens"] = max_tokens
response = openai.ChatCompletion.create(**params)
return response.choices[0].message['content']
# Function to call GPT-4o for vision capabilities
def call_gpt_4o_vision(image_path, api_key):
with open(image_path, 'rb') as image_file:
image_data = image_file.read()
image_base64 = base64.b64encode(image_data).decode('utf-8')
headers = {
"Content-Type": "application/json",
"Authorization": f"Bearer {api_key}"
}
payload = {
"model": "gpt-4o",
"messages": [
{
"role": "user",
"content": [
{"type": "text", "text": "Generate a concise name and detailed context for this image, ensuring response fits within 77 tokens. No copyright, No colon (:), trademarks, privacy rights, property rights, no number, no ensuring organization and clarity. No quotation marks or dashes, using commas for separation. Focuses on straightforward, richly descriptive titles without vague language or mentioning camera specifics or photography techniques. Ensure the response is a single line."},
{"type": "image_url", "image_url": {"url": f"data:image/jpeg;base64,{image_base64}"}}
]
}
],
"max_tokens": 77
}
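    # Send the vision request straight to the chat completions REST endpoint, with the image embedded as a base64 data URL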
response = requests.post("https://api.openai.com/v1/chat/completions", headers=headers, json=payload)
response_json = response.json()
if 'choices' in response_json and len(response_json['choices']) > 0:
return response_json['choices'][0]['message']['content'].replace('\n', ' ')
else:
return "No description available"
# Function to clean and process metadata
def clean_metadata(description):
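    # Keep only the text before a "Job ID:" tag or an --ar aspect-ratio flag, then strip quotes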
description = re.split(r'Job ID:|--ar \d+:\d+', description)[0].strip()
description = description.replace('"', '').replace("'", '')
return description
# Function to clean numbers and periods from keywords
def clean_numbers_and_periods(keywords):
cleaned_keywords = []
for keyword in keywords:
cleaned_keyword = re.sub(r'\d+|\.', '', keyword).strip()
if cleaned_keyword:
cleaned_keywords.append(cleaned_keyword)
return cleaned_keywords
# Function to generate keywords with retries
def generate_keywords_with_retries(description, keywords_rule, min_keywords=49, max_keywords=49, retries=1):
keywords_set = set()
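    # Accumulate keywords in a set across retries so duplicates are dropped automatically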
for _ in range(retries):
keywords_prompt = f"{keywords_rule}\nDescription: {description}"
keywords = call_gpt_api(keywords_prompt).strip()
keywords_list = [k.strip() for k in keywords.split(',')]
keywords_list = clean_numbers_and_periods(keywords_list)
keywords_set.update(keywords_list)
if len(keywords_set) >= min_keywords:
break
keywords_list = list(keywords_set)
if len(keywords_list) > max_keywords:
keywords_list = random.sample(keywords_list, max_keywords)
    elif len(keywords_list) < min_keywords:
        # Still short of the minimum; retry and append only keywords not already in the list
for _ in range(retries):
keywords_prompt = f"{keywords_rule}\nDescription: {description}"
keywords = call_gpt_api(keywords_prompt).strip()
keywords_list.extend([k.strip() for k in keywords.split(',') if k.strip() not in keywords_list])
keywords_list = clean_numbers_and_periods(keywords_list)
if len(keywords_list) >= min_keywords:
break
keywords_list = keywords_list[:max_keywords]
return ', '.join(keywords_list)
# Function to generate concise names and detailed contexts
def generate_concise_names(description):
concise_names_rule = (
"You are a creative assistant. Generate a concise name and detailed context for this image, ensuring response fits within 77 tokens. "
"No copyright, No colon (:), trademarks, privacy rights, property rights, no number, no ensuring organization and clarity. "
"No quotation marks or dashes, using commas for separation. Focuses on straightforward, richly descriptive titles without vague language or mentioning camera specifics or photography techniques. "
"Ensure the response is a single line. if it's too long, make it short and fit."
)
concise_names_prompt = f"{concise_names_rule}\nDescription: {description}"
result = call_gpt_api(concise_names_prompt, temperature=0.7, max_tokens=77).strip()
result = result.replace('Title:', '').strip()
result = result.replace('Name:', '').strip()
result = result.replace('"', '')
result = result.replace('\n', ' ')
return result
# Function to generate metadata
def generate_metadata(description, use_concise_names):
if use_concise_names and not description:
return None
keywords_rule = (
"Generate 40-49 single-word keywords for a microstock image. Ensure diversity by creatively linking "
"first ten must come from input name, the rest is related concepts (e.g., cross leads to religion, christ). For a black cat, use black, cat, etc. Avoid plurals, and it must be on the same horizontal row"
"format with commas. Don't generate numbers such as 1. Butterfly 2. Open 3. Pages 4. White"
"don't use photography techniques. No copyright, No trademarks, "
"No privacy rights, or property rights."
)
title = generate_concise_names(description) if use_concise_names else clean_metadata(description)
if not title:
return None
title = title.replace('\n', ' ')
keywords = generate_keywords_with_retries(description, keywords_rule)
metadata = {'title': title, 'keywords': keywords}
return metadata
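# Function to read an embedded description from an image's metadata using exiftool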
def read_image(image_path):
description = ""
try:
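        # When frozen with PyInstaller, bundled resources live under sys._MEIPASS; otherwise use the script's own directory.
        # Note that the bundled exiftool.exe path assumes a Windows environment.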
if hasattr(sys, '_MEIPASS'):
exiftool_path = os.path.join(sys._MEIPASS, 'exiftool', 'exiftool.exe')
else:
exiftool_path = os.path.join(os.path.dirname(__file__), 'exiftool', 'exiftool.exe')
result = subprocess.run([exiftool_path, '-j', image_path], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if result.returncode != 0:
return None, ""
metadata_list = json.loads(result.stdout.decode('utf-8'))
if metadata_list:
metadata = metadata_list[0]
description = (metadata.get("Caption-Abstract") or
metadata.get("ImageDescription") or
metadata.get("Description") or
metadata.get("XPComment") or
metadata.get("UserComment") or "")
if description:
description = re.split(r'Job ID:|--ar \d+:\d+', description)[0].strip()
    except Exception:
        # Ignore metadata read failures; an empty description is returned below
        pass
return None, description
def generate_metadata_for_images(directory, use_repeating_mode, use_concise_names, use_gpt_4o, api_key):
metadata_list = []
    files = [f for f in os.listdir(directory) if f.lower().endswith(('.png', '.jpg', '.jpeg'))]
title_keywords_map = {}
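    # Map each generated title to its keywords so repeated titles can reuse them in repeating ("save") mode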
for filename in files:
file_path = os.path.join(directory, filename)
if use_gpt_4o:
description = call_gpt_4o_vision(file_path, api_key)
else:
_, description = read_image(file_path)
if description:
title = generate_concise_names(description) if use_concise_names else clean_metadata(description)
if use_repeating_mode and title in title_keywords_map:
keywords = title_keywords_map[title]
else:
                metadata = generate_metadata(description, use_concise_names)
                if metadata is None:
                    # Skip files whose description could not produce a usable title
                    continue
                title_keywords_map[title] = metadata['keywords']
                keywords = metadata['keywords']
metadata_list.append({'Filename': filename, 'Title': title, 'Keywords': keywords})
return metadata_list
# Streamlit UI
st.title("Metadata Generator by Gasia")
st.write("อย่าลืมเปลี่ยนเป็นภาษาอังกฤษ")
api_key = st.text_input("OpenAI API Key")
image_directory = st.text_input("Images Folder")
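# "Save Mode" reuses previously generated keywords for images that resolve to the same title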
use_repeating_mode = st.checkbox("Enable Save Mode", value=True)
use_concise_names = st.checkbox("Generate Concise Names")
use_gpt_4o = st.checkbox("Use GPT-4o Vision")
convert_to_png = st.checkbox("Convert .JPG/.JPEG to .PNG")
if st.button("Submit"):
if not api_key or not image_directory:
st.warning("Please fill in all fields")
else:
save_settings(api_key, image_directory)
init_openai_api_key(api_key)
metadata_list = generate_metadata_for_images(image_directory, use_repeating_mode, use_concise_names, use_gpt_4o, api_key)
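        # The conversion option below only rewrites the extension in the CSV rows; the image files themselves are not converted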
if convert_to_png:
for metadata in metadata_list:
if metadata['Filename'].lower().endswith(('.jpg', '.jpeg')):
metadata['Filename'] = re.sub(r'\.(jpg|jpeg)$', '.png', metadata['Filename'], flags=re.IGNORECASE)
if metadata_list:
df = pd.DataFrame(metadata_list)
st.write(df)
csv = df.to_csv(index=False).encode('utf-8')
st.download_button(
label="Download metadata as CSV",
data=csv,
file_name='image_metadata.csv',
mime='text/csv',
)
if st.button("Reset Settings"):
api_key = ''
image_directory = ''
save_settings(api_key, image_directory)
st.success("Settings have been reset to default values")