# There are two embedding options, HuggingFaceHubEmbeddings and HuggingFaceBgeEmbeddings (cloud and local, respectively).
# They map to two Pinecone indexes: myindex01 and myindex.
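# Minimal sketch of the assumed pairing between embeddings and indexes (inferred from the
# comment above and the code below, not stated explicitly anywhere in this file):
#   cloud embeddings: HuggingFaceHubEmbeddings()    -> pc.Index("myindex01")
#   local embeddings: HuggingFaceBgeEmbeddings(...) -> pc.Index("myindex")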
import gradio as gr
import requests
import dashscope
from http import HTTPStatus
import json
# from langchain.llms import Tongyi
from langchain_community.llms import Tongyi, ChatGLM, OpenAI
from langchain import hub
from langchain_community.tools.tavily_search import TavilySearchResults
from langchain.tools import tool
# from langchain_community.embeddings import TensorflowHubEmbeddings
from langchain_community.embeddings import HuggingFaceBgeEmbeddings
from langchain_community.embeddings import HuggingFaceHubEmbeddings
from langchain_community.llms import HuggingFaceHub
from pinecone import Pinecone, ServerlessSpec
# from langchain.vectorstores import Pinecone as Pinecone_VectorStore
from langchain_community.vectorstores import Pinecone as Pinecone_VectorStore
from langchain.tools.retriever import create_retriever_tool
from langchain.agents import AgentExecutor,create_react_agent
from getpass import getpass
import os
os.environ['TAVILY_API_KEY'] = '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'
@tool
def tqyb(query: str) -> str:
"""这是天气预报api,示例query=北京"""
url=f"https://api.seniverse.com/v3/weather/now.json?key=SWtPLxs4A2GhenWC-&location={query}&language=zh-Hans&unit=c"
response = requests.get(url)
# 检查请求是否成功
if response.status_code == 200:
res=response.json()
return res # 假设API返回的是JSON格式数据
else:
return f"请求失败,状态码:{response.status_code}"
os.environ['OPENAI_API_KEY'] = "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"
# llm = HuggingFaceHub(repo_id="Qwen/Qwen1.5-0.5B", model_kwargs={"temperature": 0.5, "max_length": 64})
llm = Tongyi(dashscope_api_key="sk-78c45d761ed04af2b965b43cd522108b", model="qwen-72b")
prompt = hub.pull("hwchase17/react")
search = TavilySearchResults(max_results=1)
model_name = "BAAI/bge-small-en"
model_kwargs = {"device": "cpu"}
encode_kwargs = {"normalize_embeddings": True}
# embeddings = HuggingFaceBgeEmbeddings(
# model_name=model_name, model_kwargs=model_kwargs, encode_kwargs=encode_kwargs
# )
embeddings = HuggingFaceHubEmbeddings()
pc = Pinecone(api_key='3538cd3c-eca8-4c61-9463-759f5ea65b10')
index = pc.Index("myindex01")
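# Assumed pairing (per the header comment): if the local HuggingFaceBgeEmbeddings above are used
# instead of HuggingFaceHubEmbeddings, switch to the matching index, e.g.:
#   index = pc.Index("myindex")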
vectorstore = Pinecone_VectorStore(index, embeddings.embed_query, "text")
db = vectorstore.as_retriever()
retriever_tool = create_retriever_tool(
    db,
    "shuangcheng_search",
    "Retrieval tool for information about Shuangcheng district. If the question concerns Shuangcheng, you must use this tool!",
)
tools = [search, tqyb, retriever_tool]
agent = create_react_agent(llm, tools, prompt)
agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)
async def predict(question):
    que = {"input": question}
    # Use the async entry point since Gradio awaits this coroutine
    res = await agent_executor.ainvoke(que)
    if res:
        return res["output"]
    else:
        return "Sorry, something went wrong. Please contact me on WeChat: 13603634456"
gr.Interface(
    fn=predict,
    inputs="textbox",
    outputs="textbox",
    title="Custom AI Expert BOT",
    description=(
        "This is a custom AI expert BOT: type a question and the AI will answer it.\n"
        "Three example tools are currently available:\n"
        "1. Weather forecast (function-calling API)\n"
        "2. Shuangcheng district information retrieval (RAG)\n"
        "3. Search engine"
    ),
).launch()