from langchain_openai import ChatOpenAI
from langchain.chains import ConversationChain
from langchain.memory import ConversationBufferMemory


def get_chat_response(prompt, memory, openai_api_key):
    """Send *prompt* through a memory-backed conversation chain and return the reply text.

    Args:
        prompt: The user's input message for this turn.
        memory: A LangChain memory object (e.g. ``ConversationBufferMemory``)
            shared across calls so the model sees prior turns.
        openai_api_key: API key forwarded to the OpenAI-compatible endpoint.

    Returns:
        The model's reply string (the ``"response"`` field of the chain output).
    """
    model = ChatOpenAI(
        model="gpt-3.5-turbo",
        openai_api_key=openai_api_key,
        # Custom OpenAI-compatible proxy endpoint used by this project.
        base_url="https://api.aigc369.com/v1",
    )
    chain = ConversationChain(llm=model, memory=memory)
    response = chain.invoke({"input": prompt})
    return response["response"]

# NOTE(review): commented-out demo calls that embedded a hardcoded API key were
# removed — that key was committed to source and must be revoked/rotated.
# Example usage (supply the key via an environment variable, never a literal):
#   memory = ConversationBufferMemory(return_messages=True)
#   get_chat_response("hello", memory, openai_api_key=os.environ["OPENAI_API_KEY"])