ai.py
import os
import re
from collections import deque

import openai
from Bard import Chatbot as Bard
from EdgeGPT import Chatbot as Bing
from EdgeGPT import ConversationStyle
from hugchat import hugchat

MODELS = ["bard", "bing", "gpt", "hugchat", "llama"]
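

# Bounded message history: deque(maxlen=msg_limit) silently drops the oldest
# entry once the limit is reached, so only the most recent turns are kept.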
class ChatHistory:
    def __init__(self, msg_limit):
        self.stack = deque(maxlen=msg_limit)

    def append(self, msg):
        return self.stack.append(msg)

    def get_as_list(self):
        return list(self.stack)
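

# ChatModel ties a supported model name to its chat trigger ("!<model>",
# e.g. "!gpt") and to the API wrapper that actually talks to that backend.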
class ChatModel:
    def __init__(self, model):
        assert (
            model in MODELS
        ), f"model argument to {__class__.__name__} must be one of {MODELS}"
        self.model = model
        self.trigger = f"!{model}"
        self.api = self.get_api()

    def get_api(self):
        if self.model == "bing":
            return BingAPI(
                cookie_path="./config/bing.json",
                conversation_style=ConversationStyle.balanced,
            )
        if self.model == "bard":
            token = os.getenv("BARD_TOKEN")
            return BardAPI(token)
        if self.model == "gpt":
            openai_api_key = os.getenv("OPENAI_API_KEY")
            openai_api_base = (
                os.getenv("OPENAI_API_BASE") or "https://api.openai.com/v1"
            )
            return OpenAIAPI(api_key=openai_api_key, api_base=openai_api_base)
        if self.model == "hugchat":
            return HugchatAPI(cookie_path="./config/hugchat.json")
        if self.model == "llama":
            llama_api_key = "this_can_be_anything"
            llama_api_base = os.getenv("LLAMA_API_BASE")
            return OpenAIAPI(api_key=llama_api_key, api_base=llama_api_base)
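

# Thin wrapper around the reverse-engineered Bard client. Note that
# Bard.ask() is a blocking call even though send() is declared async.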
class BardAPI:
    def __init__(self, token):
        self.chat = Bard(token)

    async def send(self, text):
        return self.chat.ask(text)
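

# Wrapper around EdgeGPT. Footnote markers like [^1^] in Bing's reply are
# rewritten to [1], and any source attributions are appended as a numbered
# reference list.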
class BingAPI:
    def __init__(self, cookie_path, conversation_style):
        self.conversation_style = conversation_style
        self.chat = Bing(cookie_path=cookie_path)

    def _cleanup_footnote_marks(self, response):
        return re.sub(r"\[\^(\d+)\^\]", r"[\1]", response)

    def _parse_sources(self, sources_raw):
        name = "providerDisplayName"
        url = "seeMoreUrl"

        sources = ""
        for i, source in enumerate(sources_raw, start=1):
            if name in source.keys() and url in source.keys():
                sources += f"[{i}]: {source[name]}: {source[url]}\n"
            else:
                continue

        return sources

    async def send(self, text):
        data = await self.chat.ask(prompt=text)

        sources_raw = data["item"]["messages"][1]["sourceAttributions"]
        if sources_raw:
            sources = self._parse_sources(sources_raw)
        else:
            sources = ""

        response_raw = data["item"]["messages"][1]["text"]
        response = self._cleanup_footnote_marks(response_raw)

        if sources:
            return f"{response}\n\n{sources}"
        else:
            return response
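

# Wrapper around HuggingChat; authentication comes from the exported cookie
# file passed in by get_api() (./config/hugchat.json).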
class HugchatAPI:
    def __init__(self, cookie_path):
        self.chat = hugchat.ChatBot(cookie_path=cookie_path)

    async def send(self, text):
        return self.chat.chat(text)
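

# Client for any OpenAI-compatible chat completions endpoint. It keeps the
# last max_history messages (user and assistant) so follow-up prompts retain
# some context, and it is reused for the "llama" model by pointing api_base
# at LLAMA_API_BASE.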
class OpenAIAPI:
    def __init__(
        self, api_key, api_base, model="gpt-3.5-turbo", max_history=5, max_tokens=1024
    ):
        self.model = model
        self.history = ChatHistory(max_history)
        self.max_tokens = max_tokens
        self.api_key = api_key
        self.api_base = api_base

    async def send(self, text):
        openai.api_key = self.api_key
        openai.api_base = self.api_base

        new_message = {"role": "user", "content": text}
        self.history.append(new_message)
        messages = self.history.get_as_list()

        response = openai.ChatCompletion.create(
            model=self.model, messages=messages, max_tokens=self.max_tokens
        )

        self.history.append(response.choices[0].message)
        response = response.choices[0].message.content

        return response.strip()
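

# Illustrative usage sketch, not part of the original module: drives one
# ChatModel directly, assuming OPENAI_API_KEY is set and the legacy (<1.0)
# openai client used above is installed. The demo() coroutine is only an
# example name.
if __name__ == "__main__":
    import asyncio

    async def demo():
        gpt = ChatModel("gpt")  # resolved to an OpenAIAPI instance by get_api()
        print(f"trigger: {gpt.trigger}")  # "!gpt"
        print(await gpt.api.send("Say hello in one short sentence."))

    asyncio.run(demo())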