在LangChain中调用清华智谱大模型后台流式返回结果
ChatGLM_new.py
from langchain_openai import ChatOpenAI
import jwt
import time
from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
# NOTE(review): hard-coded API credential committed to source — this key is now
# public and should be rotated; load it from an environment variable instead.
zhipuai_api_key = "bdc59e310deb29d48e6be230d487c518.n88YR9GP76XUePoL"
def generate_token(apikey: str, exp_seconds: int) -> str:
    """Build a short-lived HS256 JWT for the ZhipuAI open platform.

    The combined credential has the form ``"<key_id>.<secret>"``.  The token
    payload uses millisecond-resolution timestamps, matching what the ZhipuAI
    backend expects for its ``exp``/``timestamp`` claims.

    Args:
        apikey: Combined ``"<id>.<secret>"`` credential string.
        exp_seconds: Token lifetime in seconds from now.

    Returns:
        The encoded JWT string.

    Raises:
        ValueError: If ``apikey`` is not exactly ``"<id>.<secret>"``.
            (ValueError subclasses Exception, so existing callers that
            caught the old broad ``Exception`` still work.)
    """
    try:
        key_id, secret = apikey.split(".")
    except Exception as e:
        raise ValueError("invalid apikey", e) from e
    # Compute "now" once so exp and timestamp are mutually consistent.
    now_ms = int(round(time.time() * 1000))
    payload = {
        "api_key": key_id,
        "exp": now_ms + exp_seconds * 1000,
        "timestamp": now_ms,
    }
    return jwt.encode(
        payload,
        secret,
        algorithm="HS256",
        headers={"alg": "HS256", "sign_type": "SIGN"},
    )
# Drive ZhipuAI's OpenAI-compatible v4 endpoint through LangChain's ChatOpenAI
# client, authenticating with a self-signed JWT instead of an OpenAI key.
# NOTE(review): the token is generated once at import time with a 10-second
# lifetime (exp_seconds=10), so any request issued more than ~10s after import
# will be rejected — confirm and regenerate the token per request if needed.
zhipu_llm = ChatOpenAI(
    model_name="glm-4",
    openai_api_base="https://open.bigmodel.cn/api/paas/v4",
    openai_api_key=generate_token(zhipuai_api_key,10),
    streaming=False,  # callers below re-enable streaming before .stream()
    verbose=True
)
# messages = [
# # AIMessage(content="Hi."),
# # SystemMessage(content="Your role is a poet."),
# # HumanMessage(content="深圳2008年的GDP多少亿"),
# HumanMessage(content="only give me the result,no other words:the result of add 3 to 4"),
# ]
# response = zhipu_llm.invoke(messages)
# print(response)
调用:
流式输出(同步 stream 调用,逐块返回结果)
# Demo: stream a GLM-4 completion and print it chunk by chunk.
from ChatGLM_new import zhipu_llm
from langchain_core.messages import AIMessage, HumanMessage, SystemMessage

# Example chat-message input.  The .stream() call below is fed a plain string
# instead, so this list is kept only as a reference for the message-based form.
messages = [
    # AIMessage(content="Hi."),
    # SystemMessage(content="Your role is a poet."),
    HumanMessage(content="红楼梦里面有猪八戒吗"),
    # HumanMessage(content="only give me the result,no other words:the result of add 3 to 4"),
]

# Re-enable streaming on the already-constructed model object.
zhipu_llm.streaming = True
# print(zhipu_llm)

# Each yielded piece is a message chunk; emit its text as it arrives.
for piece in zhipu_llm.stream("猪八戒的爸爸是谁"):
    # print(piece.content, end="", flush=True)
    print(piece.content)
原文地址:https://blog.csdn.net/oHeHui1/article/details/136389922
免责声明:本站文章内容转载自网络资源,如本站内容侵犯了原著者的合法权益,可联系本站删除。更多内容请关注自学内容网(zxcms.com)!