File size: 1,274 Bytes
a2320ed
b0a2a59
6ed43b1
4c1fe3c
9c084a1
79e7eb6
 
 
 
4c1fe3c
 
 
 
 
 
d1117d8
6ed43b1
79e7eb6
6ed43b1
79e7eb6
6ed43b1
d1117d8
4c1fe3c
 
 
a2320ed
6ed43b1
2f9e402
 
1fb8db8
 
 
 
2f9e402
 
 
79d06e4
2f9e402
f51b7a0
 
2f9e402
6ed43b1
 
 
79d06e4
 
 
 
 
6921d67
79d06e4
 
 
 
 
 
6ed43b1
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
import streamlit as st
import os
from transformers import pipeline, set_seed
from huggingface_hub import HfFolder

import transformers
import torch


# Persist the Hugging Face token for hub access when one is provided;
# otherwise continue anonymously and let the user know.
HF_TOKEN = os.getenv('HF_TOKEN')
if not HF_TOKEN:
    st.warning("HF_TOKEN is not set. Proceeding without a token.")
else:
    HfFolder.save_token(HF_TOKEN)

# Streamlit re-executes this whole script on every widget interaction, so
# cache the pipeline: without this, the gpt2-large weights would be
# re-loaded from disk on each button click or text change.
@st.cache_resource
def _load_generator():
    """Return a cached transformers text-generation pipeline (gpt2-large)."""
    return pipeline('text-generation', model='gpt2-large')


generator = _load_generator()

def _parse_int(raw, default):
    """Parse *raw* as an int, returning *default* on empty or invalid input."""
    try:
        return int(raw)
    except (TypeError, ValueError):
        return default


st.title("Text Generation")
st.write("Enter your text below.")
text = st.text_area("Your input")

st.write("Enter seed.")
seed_input = st.text_area("Set seed")

st.write("Enter max length.")
max_length_input = st.text_area("max length")

# Parse each field independently: in the original a single try/except reset
# BOTH values to their defaults whenever either one failed to parse.
seed = _parse_int(seed_input, 1)                 # default seed: 1
max_length = _parse_int(max_length_input, 100)   # default length: 100

# Seed the RNG so a given seed value yields reproducible generations.
set_seed(seed)


if st.button("Generate Text"):
    # seed is always an int here (the parse step above falls back to a
    # default), so the original `if seed is not None` guard was dead code:
    # re-seed unconditionally for a reproducible result per click.
    set_seed(seed)

    if text and max_length:
        # Generate a single continuation of the prompt; max_length counts
        # prompt tokens plus generated tokens.
        out = generator(text, max_length=max_length, num_return_sequences=1)
        st.json(out)
        st.write(f"Reply: {out[0]['generated_text']}")
    else:
        # Explicit feedback instead of the original silent no-op.
        st.warning("Please enter input text and a non-zero max length.")