import streamlit as st
import torch
from huggingface_hub import HfApi
from transformers import (
    AutoModelForCausalLM,
    AutoModelForQuestionAnswering,
    AutoTokenizer,
    pipeline,
)




# Sidebar navigation: each page section is toggled by its own independent
# checkbox (so more than one section can be shown at once).
st.sidebar.title('Menu')

home, time_series, chatbot = (
    st.sidebar.checkbox(label)
    for label in ("Home", "Time Series Data", "Chatbot")
)

if home:
    # Landing section: page title followed by a short welcome blurb.
    st.title("Food Security in Africa and Asia")
    welcome = (
        "Hi there! I'm your food security assistant. Food security means "
        "everyone has access to safe, nutritious food to meet their dietary "
        "needs.\n"
        "Want to learn more about food insecurity, its causes, or potential "
        "solutions?"
    )
    st.text(welcome)

if time_series:
    # Time-series section: header plus a short description of the dataset.
    st.header("Time series data from 2000 to 2022")
    # Fixed typos in the user-facing caption:
    # "depict metrcis" -> "depicts metrics", "produduced" -> "produced".
    st.text("This data was collected from trusted organizations and depicts metrics on food security based on climate change and food produced")


if chatbot:

    st.header("Chat with me.")
    text = st.text_area("Food security is a global challenge. Let's work together to find solutions. How can I help you today?")

    @st.cache_resource
    def _load_gemma():
        """Load the Gemma tokenizer and model once per server process.

        Streamlit re-executes the whole script on every widget interaction;
        without caching, the 9B-parameter model would be reloaded on each
        rerun.
        """
        tokenizer = AutoTokenizer.from_pretrained("google/gemma-2-9b-it")
        # google/gemma-2-9b-it is a causal (generative) language model.
        # AutoModelForQuestionAnswering builds an extractive-QA head and has
        # no .generate() method, so the original code would crash below.
        model = AutoModelForCausalLM.from_pretrained(
            "google/gemma-2-9b-it",
            device_map="auto",
            torch_dtype=torch.bfloat16,
        )
        return tokenizer, model

    tokenizer, model = _load_gemma()

    if text:
        # Send the inputs to wherever device_map="auto" actually placed the
        # model, instead of hard-coding "cuda" (which fails on CPU-only hosts).
        input_ids = tokenizer(text, return_tensors="pt").to(model.device)
        # Bound the reply length; the transformers default (20 new tokens)
        # truncates chatbot answers mid-sentence.
        outputs = model.generate(**input_ids, max_new_tokens=256)
        st.write(tokenizer.decode(outputs[0], skip_special_tokens=True))




# NOTE(review): removed a commented-out legacy chatbot variant (an unassigned
# module-level triple-quoted string that built a "question-answering"
# pipeline). It was dead code with no runtime effect; recover it from version
# control if ever needed.