# Qwen, Kimi, DeepSeek: providers with OpenAI-compatible APIs.
# TODO: SSL CA verification keeps firing on these calls; still need to figure out
# how to skip it (see the sketch after this class for one possible approach).
class openai:
def __init__(self,a:auth) -> None:
self.key =a.sk
self.url =urlMap[a.type]
self.client = OpenAI(api_key=self.key,base_url=self.url)
self.history = [{'role': 'system', 'content': 'You are a helpful assistant.'}]
self.model = ModelMap[a.type][0]
self.isMulti = False
def setModel(self,s:str):
self.model = s
    def StreamCall(self,s:str,fn:Callable[[str],None]):
        if not self.isMulti:
            # single-turn mode: reset the history but keep the system prompt
            self.history = [{'role': 'system', 'content': 'You are a helpful assistant.'}]
        self.history.append({'role': 'user', 'content': f'{s}'})
        cmp = self.client.chat.completions.create(
            model=self.model,
            messages=self.history,  # type: ignore
            stream=True
        )
        temp = ""
        for chunk in cmp:
            # delta content can be None on some chunks (e.g. the final one)
            ctx = chunk.choices[0].delta.content
            try:
                if ctx and len(ctx) > 0:
                    fn(ctx)
            except Exception as e:
                logging.debug(f"streamcall {e=}")
            if ctx:
                temp += ctx
        if self.isMulti:
            # record the reply as an assistant turn so the next call has context
            self.history.append({'role': 'assistant', 'content': f'{temp}'})
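# A possible fix for the SSL TODO above: the openai>=1.x SDK accepts a custom
# httpx client, so passing one with verify=False skips CA verification. This is
# only a sketch (not wired into the class above), and disabling verification is
# insecure, so it should be used for debugging the CA issue only. httpx ships as
# a dependency of the openai package.
def _openai_client_without_ssl_verify(api_key: str, base_url: str) -> OpenAI:
    import httpx
    return OpenAI(
        api_key=api_key,
        base_url=base_url,
        http_client=httpx.Client(verify=False),  # skip SSL certificate verification
    )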
# Baidu Qianfan
class qf:
def __init__(self,sk=None,ak=None) -> None:
        if sk is None or ak is None:
raise ValueError(f"{sk=},{ak=} is invalid")
self.client = qianfan.ChatCompletion(ak=ak,sk=sk)
self.model = ModelMap["千帆"][0]
self.history = []
self.isMulti =False
def setModel(self,s:str):
self.model = s
def StreamCall(self,s:str,fn:Callable[[str],None]):
temp = ""
if not self.isMulti:
self.history = []
self.history.append({"role":"user","content":f"{s}"})
resp = self.client.do(model=self.model,messages=self.history,stream=True)
for chunk in resp:
ctx = chunk["body"]["result"]
try:
if len(ctx)>0:
fn(ctx)
except Exception as e:
showMessage("错误",f"{e=}",None,None)
temp +=ctx
        if self.isMulti:
            self.history.append({'role': 'assistant', 'content': f'{temp}'})
import json
# Tencent Hunyuan
from tencentcloud.common.common_client import CommonClient
from tencentcloud.common import credential
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.common.profile.http_profile import HttpProfile
class hy:
def __init__(self,sk=None,ak=None) -> None:
        if sk is None or ak is None:
raise ValueError(f"{sk=},{ak=} is invalid")
self.model = ModelMap["混元"][0]
self.message = {"Messages":[],"Stream":True,"Model":self.model}
self.history =self.message["Messages"]
self.isMulti =False
cred = credential.Credential(ak, sk)
httpProfile = HttpProfile()
httpProfile.endpoint = "hunyuan.tencentcloudapi.com"
clientProfile = ClientProfile()
clientProfile.httpProfile = httpProfile
params = "{}";
self.client = CommonClient("hunyuan", "2023-09-01", cred, "", profile=clientProfile)
def setModel(self,s:str):
self.model = s
self.message["Model"] = self.model
def StreamCall(self,s:str,fn:Callable[[str],None]):
temp = ""
if not self.isMulti:
self.message["Messages"] = []
self.message["Messages"].append({"Role":"user","Content":f"{s}"})
resp = self.client.call_sse("ChatCompletions", self.message)
for chunk in resp:
ctx = json.loads(chunk["data"])["Choices"][0]["Delta"]["Content"]
# ctx = chunk["body"]["result"]
try:
if len(ctx)>0:
fn(ctx)
except Exception as e:
showMessage("错误",f"{e=}",None,None)
temp +=ctx
        if self.isMulti:
            # the Hunyuan message list uses capitalized keys; record the reply as an assistant turn
            self.history.append({"Role": "assistant", "Content": f"{temp}"})
# Doubao (Volcengine Ark) is the awkward one: besides ak/sk it also needs a model-id style key (an endpoint id) as the model name.
from volcenginesdkarkruntime import Ark
class db:
def __init__(self,sk=None,ak=None,model=None) -> None:
        if sk is None or ak is None or model is None:
            raise ValueError(f"{sk=},{ak=},{model=} is invalid")
self.client = Ark(ak=ak,sk=sk,base_url="https://ark.cn-beijing.volces.com/api/v3")
self.model = model
self.history = []
self.isMulti =False
def setModel(self,s:str):
self.model = s
def StreamCall(self,s:str,fn:Callable[[str],None]):
temp = ""
if not self.isMulti:
self.history = []
self.history.append({"role":"user","content":f"{s}"})
resp: Stream[ChatCompletionChunk] = self.client.chat.completions.create(model=self.model,messages=self.history,stream=True) # type: ignore
for chunk in resp:
if not chunk.choices:
continue
            # delta content may be None on some chunks
            ctx: str = chunk.choices[0].delta.content or ""  # type: ignore
            try:
                if len(ctx) > 0:
                    fn(ctx)
            except Exception as e:
                showMessage("Error",f"{e=}",None,None)
            temp += ctx
        if self.isMulti:
            self.history.append({'role': 'assistant', 'content': f'{temp}'})
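# Minimal usage sketch: all four wrappers above expose the same duck-typed
# interface (setModel + StreamCall), so callers only need a streaming callback.
# Any credential values and the Ark endpoint-style model id mentioned below are
# placeholders, not real configuration.
def _demo_stream(client) -> None:
    def on_token(token: str) -> None:
        print(token, end="", flush=True)    # print tokens as they arrive
    client.isMulti = True                   # keep history so follow-up turns have context
    client.StreamCall("Hello", on_token)
# e.g. _demo_stream(db(sk="<sk>", ak="<ak>", model="<ark-endpoint-id>"))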