# terenceGPT / appOG.py
import pickle
from typing import Optional, Tuple
import gradio as gr
from threading import Lock
from langchain import PromptTemplate
import os
import openai
# Read the OpenAI key from the environment rather than hard-coding a secret in the source.
openai.api_key = os.environ["OPENAI_API_KEY"]
vecpath = "terence_vectorstore.pkl"
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('-d', '--data_directory', type=str, help='Path to directory containing data files to index')
parser.add_argument('-p', '--prompt', default='plant', help="String prompt template to use, must contain {question} and {context}", type=str)
parser.add_argument('-pp', '--prompt_path', type=str, help='Path to custom prompt template to use with LLM ChatBot + Vectorstore')
parser.add_argument('-t', '--temperature', type=float, default=0.7, help='LLM temperature setting... lower == more deterministic')
parser.add_argument('-m', '--max_tokens', type=int, default=500, help='LLM maximum number of output tokens')
parser.add_argument('-v', '--vectorstore_path', default=vecpath, type=str, help='Path to saved index')
parser.add_argument('-dv', '--live_vectorstore_path', default=r"indices\vectorstore_from_docs.pkl", type=str, help='Path to save temporary index')
parser.add_argument('-f', '--font_size', type=int, default=20, help='Chatbot window font size (default: 20px)')
parser.add_argument('-s', '--share', action='store_true', default=False, help="Host online and produce shareable link")
args = parser.parse_args()
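# Illustrative invocation (assumes this file is saved as appOG.py and OPENAI_API_KEY is set):
#   python appOG.py -v terence_vectorstore.pkl -t 0.5 -m 400 --share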
from langchain.prompts.prompt import PromptTemplate
from langchain.llms import OpenAI
from langchain.chains import ChatVectorDBChain
_template = """Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question.
Chat History:
{chat_history}
Follow Up Input: {question}
Standalone question:"""
CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(_template)
tqa2_template= """Terence is an ethnobotanist, shamanologist, and visionary. Terence studied art history, ancient languages, and conservation of natural resources.
Terence has traveled around searching for a viable, vibrant, numinous approach to spiritual reality and is interested in the idea of mind revelation to the world.
Terence is an advocate for the shaman as a culture hero and was the central figure in the culture of rave and house music.
Terence McKenna's writing and speaking style is characterized by his passion and enthusiasm for the topics he explored.
He is known for his lucid and poetic language, which he used to convey complex ideas in an accessible way.
He is also a master of metaphor and analogy, which he used to illustrate his points and help the audience connect with the material.
Terence is known for his captivating use of cadence, pacing, and humor.
Terence will often make use of metaphor and allegory to illustrate his points, and his use of language is both vivid and imaginative.
Terence has a unique ability to blend together the scientific, mystical, and metaphysical, creating an engaging and thought-provoking blend of ideas.
Terence is also known for his willingness to explore and challenge the status quo.
He does not shy away from difficult topics, and is willing to push the boundaries of accepted knowledge and explore the unknown.
His openness and willingness to explore the unknown are a key part of his style, and are something that people can still strive to embody today.
Terence is given the following extracted parts of a long document and a question. Provide a conversational answer in the style of Terence McKenna.
Human: {question}
=========
{context}
=========
Terence:
"""
TMK_PROMPT = PromptTemplate(
    template=tqa2_template, input_variables=["question", "context"]
)
###########################################################################################
from langchain import PromptTemplate
qa_template = """Terence is a researcher who has been trained on a vast corpus of medical and anecdotal knowledge about psychedelics, their use, their medical benefits, mechanisms of action, as well as their historical use and applications.
Terence is designed to be able to assist with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics, particularly about psychedelics and psychedelic research, chemistry, pharmacology, as well as shamanic and ritual use.
As a language model, Terence is able to generate human-like text based on the input it receives, allowing it to engage in natural-sounding conversations and provide responses that are coherent and relevant to the topic at hand.
Terence can engage in discussions, reason about, and provide explanations for the potential implications of psychedelic research and answer questions that humans may have based on this knowledge and Terence's own speculations.
Overall, Terence is a powerful tool that can help with a wide range of tasks and provide valuable insights and information on psychedelics and the state of the art in modern research.
Whether you need help with a specific question or just want to have a conversation about a particular topic, Terence is here to assist.
Terence is given the following extracted parts of a long document and a question. Provide a conversational answer.
Human: {question}
=========
{context}
=========
Answer:"""
QA_PROMPT = PromptTemplate(template=qa_template, input_variables=["question", "context"])
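# Illustrative: filling the template produces the final prompt string sent to the LLM, e.g.
#   QA_PROMPT.format(question="What is psilocybin?", context="<retrieved document chunks>")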
###########################################################################################
def get_chain(vectorstore,
              temperature=0.7,
              max_tokens=384,
              qa_prompt=TMK_PROMPT,
              condense_prompt=CONDENSE_QUESTION_PROMPT,
              prompt=None):
    llm = OpenAI(temperature=temperature, max_tokens=max_tokens)
    qa_chain = ChatVectorDBChain.from_llm(
        llm,
        vectorstore,
        qa_prompt=qa_prompt,  # or prompt,
        condense_question_prompt=condense_prompt,
    )
    return qa_chain
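# Illustrative usage of get_chain (assumes a vectorstore has been unpickled, as done below):
#   qa = get_chain(VECTORSTORE)
#   result = qa({"question": "What is novelty theory?", "chat_history": []})
#   print(result["answer"])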
LIVE_VECTORSTORE_PATH = args.live_vectorstore_path
DATA_DIRECTORY = args.data_directory
NEW_VECTORSTORE_SET = False
NEW_DIRECTORY_SET = False
# Attempt to load base vectorstore
chain = None
try:
    with open(args.vectorstore_path, "rb") as f:
        VECTORSTORE = pickle.load(f)
    # print("Loaded vectorstore from `{}`.".format(args.vectorstore_path))
    chain = get_chain(
        VECTORSTORE,
        temperature=args.temperature,
        max_tokens=args.max_tokens,
        prompt=args.prompt
    )
    # print("Loaded LangChain...")
except Exception:
    VECTORSTORE = None
    # print("NO vectorstore loaded. Flying blind")
def set_vectorstore(vectorstore_path: str):
    """Load a vectorstore pickle and rebuild the chain around it."""
    chain = None
    if vectorstore_path is not None:
        global VECTORSTORE, NEW_VECTORSTORE_SET
        try:
            with open(vectorstore_path, "rb") as f:
                VECTORSTORE = pickle.load(f)
            # print("Loaded `{}`".format(vectorstore_path))
            NEW_VECTORSTORE_SET = True
            chain = get_chain(
                VECTORSTORE,
                temperature=args.temperature,
                max_tokens=args.max_tokens,
                prompt=args.prompt
            )
        except Exception:
            VECTORSTORE = None
            NEW_VECTORSTORE_SET = False
            # print("NO vectorstore loaded. Reverting to original {}".format('vectorstore.pkl'))
    return chain
def initialize_chain():
    chain = get_chain(
        VECTORSTORE,
        temperature=args.temperature,
        max_tokens=args.max_tokens,
        prompt=args.prompt
    )
    # print("LangChain initialized!")
    return chain
class ChatWrapper:
    def __init__(self):
        self.lock = Lock()

    def __call__(
        self, inp: str, history: Optional[Tuple[str, str]], chain,  # , dirpath: Optional[str], vectorstore_path: Optional[str],
    ):
        """Execute the chat functionality."""
        self.lock.acquire()
        try:
            history = history or []
            # If chain is None, that is because it's the first pass and user didn't press Init.
            if chain is None:
                history.append(
                    (inp, "Please initialize LangChain by clicking 'Start Chain!'")
                )
                return history, history
            # Run chain and append input.
            output = chain({"question": inp, "chat_history": history})["answer"]
            history.append((inp, output))
            return history, history
        except Exception as e:
            raise e
        finally:
            self.lock.release()
chat = ChatWrapper()
# block = gr.Blocks(css=".gradio-container {background-color: lightgray} .overflow-y-auto{height:500px}")
# block = gr.Blocks(css='body{background-image:url("https://upload.wikimedia.org/wikipedia/commons/7/7f/Mckenna1.jpg");}')
# css=".gradio-container {background-image: url('file=Mckenna1.jpg')}"
css=".gradio-container {background-color: lightgray} .overflow-y-auto{height:500px}"
css = """
img {
border: 1px solid #ddd;
border-radius: 4px;
padding: 5px;
width: 150px;
}
<img src="paris.jpg" alt="Paris">
"""
block = gr.Blocks(css=css)
with block:
    gr.HTML("Please initialize the chain by clicking 'Start Chain!' before submitting a question.")
    with gr.Row():
        init_chain_button = gr.Button(value="Start Chain!", variant="primary").style(full_width=False)

    chatbot = gr.Chatbot()

    with gr.Row():
        gr.Markdown("<h3><center>TerenceGPT</center></h3>")

    with gr.Row():
        message = gr.Textbox(
            label="What's your question?",
            placeholder="Ask Terence McKenna",
            lines=1,
        )
        submit = gr.Button(value="Send", variant="secondary").style(full_width=False)

    with gr.Column():
        with gr.Row():
            gr.Image(type='filepath', value='McKenna3.jpg', shape=(200, 100))

    gr.HTML(
        "<center>Powered by <a href='https://github.com/hwchase17/langchain'>LangChain 🦜️🔗 and Unicorn Farts 🦄💨</a></center>"
    )

    state = gr.State()
    agent_state = gr.State()

    submit.click(
        chat,
        inputs=[message, state, agent_state],
        outputs=[chatbot, state]
    )
    message.submit(
        chat,
        inputs=[message, state, agent_state],
        outputs=[chatbot, state]
    )
    message.submit(lambda: "", None, message)

    init_chain_button.click(
        initialize_chain,
        inputs=[],
        outputs=[agent_state],
        show_progress=True
    )
# block.launch(debug=True, share=args.share)
block.launch(debug=True)