llm.py
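A unified async interface for several OpenAI-compatible chat-completions endpoints (OpenAI, Tongyi Qianwen, Zhipu AI, and Volcengine Ark/Doubao), with optional streaming output.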
from openai import AsyncOpenAI

# Provider configuration: name -> (base_url, api_key, model).
# API keys (and the Doubao endpoint ID) are redacted.
conf = {
    "openai": (
        "https://api.openai.com/v1",
        "sk-***",
        "chatgpt-4o-latest"
    ),
    "tyqw": (
        "https://dashscope.aliyuncs.com/compatible-mode/v1",
        "sk-***",
        "qwen-turbo-latest"
    ),
    "zpai": (
        "https://open.bigmodel.cn/api/paas/v4",
        "sk-***",
        "GLM-4-Flash"
    ),
    "zjdb": (
        "https://ark.cn-beijing.volces.com/api/v3",
        "sk-***",
        "ep-***"  # doubao-lite-4k
    )
}

# Build one async client per provider.
clients = {}
for name, (base_url, api_key, _) in conf.items():
    clients[name] = AsyncOpenAI(base_url=base_url, api_key=api_key)


async def call_llm(name: str, message: str, stream: bool = True):
    """Unified async LLM call interface with optional streaming output."""
    try:
        if name not in conf:
            raise ValueError(f"Unknown LLM name: {name}")
        client = clients[name]
        _, _, model = conf[name]
        response = await client.chat.completions.create(
            model=model,
            messages=[{"role": "user", "content": message}],
            stream=stream,
            temperature=0.7,
            max_tokens=1000
        )
        if stream:
            # Streaming: yield each non-empty content delta as it arrives.
            async for chunk in response:
                if chunk.choices[0].delta.content:
                    yield chunk.choices[0].delta.content
        else:
            # Non-streaming: yield the full reply as a single chunk.
            yield response.choices[0].message.content
    except Exception as e:
        error_msg = f"Error calling {name}: {str(e)}"
        print(error_msg)  # log the error
        yield error_msg


async def test_llm():
    """Smoke-test every configured provider with the same prompt."""
    message = "Hello, please introduce yourself in one sentence."
    for llm_name in conf.keys():
        print(f"\nTesting {llm_name}:")
        try:
            async for chunk in call_llm(llm_name, message):
                print(chunk, end="", flush=True)
        except Exception as e:
            print(f"Error: {str(e)}")


if __name__ == "__main__":
    import asyncio
    asyncio.run(test_llm())
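
A minimal usage sketch from a separate script, assuming this module is importable as `llm` and that a real API key replaces the redacted "sk-***" placeholder for the chosen provider (the provider name "tyqw" and the prompt below are illustrative):

import asyncio
from llm import call_llm

async def main():
    # call_llm is an async generator, so consume it with `async for`;
    # with stream=True (the default) the reply arrives chunk by chunk.
    async for chunk in call_llm("tyqw", "Summarize asyncio in one sentence."):
        print(chunk, end="", flush=True)

asyncio.run(main())

Because call_llm yields error text instead of raising, the caller's loop completes even when a provider call fails; with stream=False the full reply is delivered as a single yielded string.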