From 2bac49bf738a853c19dd405ac36deb5774757d1a Mon Sep 17 00:00:00 2001 From: hejl Date: Mon, 10 Jun 2024 11:49:29 +0800 Subject: [PATCH 1/6] add llama-3 and mixtral model for ai chat --- duckduckgo_search/duckduckgo_search.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/duckduckgo_search/duckduckgo_search.py b/duckduckgo_search/duckduckgo_search.py index 8d75312..fea02a4 100644 --- a/duckduckgo_search/duckduckgo_search.py +++ b/duckduckgo_search/duckduckgo_search.py @@ -130,7 +130,12 @@ def chat(self, keywords: str, model: str = "gpt-3.5") -> str: Returns: str: The response from the AI. """ - models = {"claude-3-haiku": "claude-3-haiku-20240307", "gpt-3.5": "gpt-3.5-turbo-0125"} + models = { + "claude-3-haiku": "claude-3-haiku-20240307", + "gpt-3.5": "gpt-3.5-turbo-0125", + "llama-3": "meta-llama/Llama-3-70b-chat-hf", + "mixtral": "mistralai/Mixtral-8x7B-Instruct-v0.1", + } # vqd if not self._chat_vqd: resp = self.client.get("https://duckduckgo.com/duckchat/v1/status", headers={"x-vqd-accept": "1"}) From dacb67f4ab45c7ea3f93b8fe6eb1c613f8491642 Mon Sep 17 00:00:00 2001 From: hejl Date: Tue, 11 Jun 2024 08:47:54 +0800 Subject: [PATCH 2/6] add the llama3,mixtral to code comments and cli --- README.md | 2 +- duckduckgo_search/cli.py | 2 +- duckduckgo_search/duckduckgo_search.py | 2 +- duckduckgo_search/duckduckgo_search_async.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index de16459..969ef21 100755 --- a/README.md +++ b/README.md @@ -236,7 +236,7 @@ def chat(self, keywords: str, model: str = "gpt-3.5") -> str: Args: keywords (str): The initial message or question to send to the AI. - model (str): The model to use: "gpt-3.5", "claude-3-haiku". Defaults to "gpt-3.5". + model (str): The model to use: "gpt-3.5", "claude-3-haiku", "llama-3", "mixtral". Defaults to "gpt-3.5". Returns: str: The response from the AI. 
diff --git a/duckduckgo_search/cli.py b/duckduckgo_search/cli.py index deccd8c..1b609f6 100644 --- a/duckduckgo_search/cli.py +++ b/duckduckgo_search/cli.py @@ -137,7 +137,7 @@ def version(): def chat(save, proxy): """CLI function to perform an interactive AI chat using DuckDuckGo API.""" cache_file = "ddgs_chat_conversation.json" - models = ["gpt-3.5", "claude-3-haiku"] + models = ["gpt-3.5", "claude-3-haiku", "llama-3", "mixtral"] client = DDGS(proxy=proxy) print("DuckDuckGo AI chat. Available models:") diff --git a/duckduckgo_search/duckduckgo_search.py b/duckduckgo_search/duckduckgo_search.py index fea02a4..de2f903 100644 --- a/duckduckgo_search/duckduckgo_search.py +++ b/duckduckgo_search/duckduckgo_search.py @@ -125,7 +125,7 @@ def chat(self, keywords: str, model: str = "gpt-3.5") -> str: Args: keywords (str): The initial message or question to send to the AI. - model (str): The model to use: "gpt-3.5", "claude-3-haiku". Defaults to "gpt-3.5". + model (str): The model to use: "gpt-3.5", "claude-3-haiku", "llama-3", "mixtral". Defaults to "gpt-3.5". Returns: str: The response from the AI. diff --git a/duckduckgo_search/duckduckgo_search_async.py b/duckduckgo_search/duckduckgo_search_async.py index f558599..2e0a1e5 100644 --- a/duckduckgo_search/duckduckgo_search_async.py +++ b/duckduckgo_search/duckduckgo_search_async.py @@ -41,7 +41,7 @@ async def achat(self, keywords: str, model: str = "gpt-3.5") -> str: Args: keywords (str): The initial message or question to send to the AI. - model (str): The model to use: "gpt-3.5", "claude-3-haiku". Defaults to "gpt-3.5". + model (str): The model to use: "gpt-3.5", "claude-3-haiku", "llama-3", "mixtral". Defaults to "gpt-3.5". Returns: str: The response from the AI. 
From 15fde20d3aaefe3afb5d2ec42a7a38eb21b91d3e Mon Sep 17 00:00:00 2001 From: hejl Date: Tue, 11 Jun 2024 17:23:11 +0800 Subject: [PATCH 3/6] rename the llama-3 and mixtral more specifically --- README.md | 2 +- duckduckgo_search/cli.py | 2 +- duckduckgo_search/duckduckgo_search.py | 6 +++--- duckduckgo_search/duckduckgo_search_async.py | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index 969ef21..0fcd569 100755 --- a/README.md +++ b/README.md @@ -236,7 +236,7 @@ def chat(self, keywords: str, model: str = "gpt-3.5") -> str: Args: keywords (str): The initial message or question to send to the AI. - model (str): The model to use: "gpt-3.5", "claude-3-haiku", "llama-3", "mixtral". Defaults to "gpt-3.5". + model (str): The model to use: "gpt-3.5", "claude-3-haiku", "llama-3-70b", "mixtral-8x7b". Defaults to "gpt-3.5". Returns: str: The response from the AI. diff --git a/duckduckgo_search/cli.py b/duckduckgo_search/cli.py index 1b609f6..3c68242 100644 --- a/duckduckgo_search/cli.py +++ b/duckduckgo_search/cli.py @@ -137,7 +137,7 @@ def version(): def chat(save, proxy): """CLI function to perform an interactive AI chat using DuckDuckGo API.""" cache_file = "ddgs_chat_conversation.json" - models = ["gpt-3.5", "claude-3-haiku", "llama-3", "mixtral"] + models = ["gpt-3.5", "claude-3-haiku", "llama-3-70b", "mixtral-8x7b"] client = DDGS(proxy=proxy) print("DuckDuckGo AI chat. Available models:") diff --git a/duckduckgo_search/duckduckgo_search.py b/duckduckgo_search/duckduckgo_search.py index de2f903..fdbd49e 100644 --- a/duckduckgo_search/duckduckgo_search.py +++ b/duckduckgo_search/duckduckgo_search.py @@ -125,7 +125,7 @@ def chat(self, keywords: str, model: str = "gpt-3.5") -> str: Args: keywords (str): The initial message or question to send to the AI. - model (str): The model to use: "gpt-3.5", "claude-3-haiku", "llama-3", "mixtral". Defaults to "gpt-3.5". 
+ model (str): The model to use: "gpt-3.5", "claude-3-haiku", "llama-3-70b", "mixtral-8x7b". Defaults to "gpt-3.5". Returns: str: The response from the AI. @@ -133,8 +133,8 @@ def chat(self, keywords: str, model: str = "gpt-3.5") -> str: models = { "claude-3-haiku": "claude-3-haiku-20240307", "gpt-3.5": "gpt-3.5-turbo-0125", - "llama-3": "meta-llama/Llama-3-70b-chat-hf", - "mixtral": "mistralai/Mixtral-8x7B-Instruct-v0.1", + "llama-3-70b": "meta-llama/Llama-3-70b-chat-hf", + "mixtral-8x7b": "mistralai/Mixtral-8x7B-Instruct-v0.1", } # vqd if not self._chat_vqd: diff --git a/duckduckgo_search/duckduckgo_search_async.py b/duckduckgo_search/duckduckgo_search_async.py index 2e0a1e5..3ba629f 100644 --- a/duckduckgo_search/duckduckgo_search_async.py +++ b/duckduckgo_search/duckduckgo_search_async.py @@ -41,7 +41,7 @@ async def achat(self, keywords: str, model: str = "gpt-3.5") -> str: Args: keywords (str): The initial message or question to send to the AI. - model (str): The model to use: "gpt-3.5", "claude-3-haiku", "llama-3", "mixtral". Defaults to "gpt-3.5". + model (str): The model to use: "gpt-3.5", "claude-3-haiku", "llama-3-70b", "mixtral-8x7b". Defaults to "gpt-3.5". Returns: str: The response from the AI. From fbb0c4b53523e230a7ed5ff89080e9f0f3d96781 Mon Sep 17 00:00:00 2001 From: deedy5 <65482418+deedy5@users.noreply.github.com> Date: Wed, 12 Jun 2024 00:17:05 +0300 Subject: [PATCH 4/6] Ruff fix duckduckgo_search.py --- duckduckgo_search/duckduckgo_search.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/duckduckgo_search/duckduckgo_search.py b/duckduckgo_search/duckduckgo_search.py index fdbd49e..63e3f02 100644 --- a/duckduckgo_search/duckduckgo_search.py +++ b/duckduckgo_search/duckduckgo_search.py @@ -125,7 +125,8 @@ def chat(self, keywords: str, model: str = "gpt-3.5") -> str: Args: keywords (str): The initial message or question to send to the AI. 
- model (str): The model to use: "gpt-3.5", "claude-3-haiku", "llama-3-70b", "mixtral-8x7b". Defaults to "gpt-3.5". + model (str): The model to use: "gpt-3.5", "claude-3-haiku", "llama-3-70b", "mixtral-8x7b". + Defaults to "gpt-3.5". Returns: str: The response from the AI. From 49a0fc3437a9755dc0bc94c18e3691db46cd0efd Mon Sep 17 00:00:00 2001 From: deedy5 <65482418+deedy5@users.noreply.github.com> Date: Wed, 12 Jun 2024 00:18:26 +0300 Subject: [PATCH 5/6] Ruff fix duckduckgo_search_async.py --- duckduckgo_search/duckduckgo_search_async.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/duckduckgo_search/duckduckgo_search_async.py b/duckduckgo_search/duckduckgo_search_async.py index 3ba629f..b02cf69 100644 --- a/duckduckgo_search/duckduckgo_search_async.py +++ b/duckduckgo_search/duckduckgo_search_async.py @@ -41,7 +41,8 @@ async def achat(self, keywords: str, model: str = "gpt-3.5") -> str: Args: keywords (str): The initial message or question to send to the AI. - model (str): The model to use: "gpt-3.5", "claude-3-haiku", "llama-3-70b", "mixtral-8x7b". Defaults to "gpt-3.5". + model (str): The model to use: "gpt-3.5", "claude-3-haiku", "llama-3-70b", "mixtral-8x7b". + Defaults to "gpt-3.5". Returns: str: The response from the AI. From de6641ed413bbe4a95347d8746d4610e2fe0460b Mon Sep 17 00:00:00 2001 From: deedy5 <65482418+deedy5@users.noreply.github.com> Date: Wed, 12 Jun 2024 00:26:07 +0300 Subject: [PATCH 6/6] Ruff format duckduckgo_search.py --- duckduckgo_search/duckduckgo_search.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/duckduckgo_search/duckduckgo_search.py b/duckduckgo_search/duckduckgo_search.py index 63e3f02..c9218ea 100644 --- a/duckduckgo_search/duckduckgo_search.py +++ b/duckduckgo_search/duckduckgo_search.py @@ -125,16 +125,16 @@ def chat(self, keywords: str, model: str = "gpt-3.5") -> str: Args: keywords (str): The initial message or question to send to the AI. 
- model (str): The model to use: "gpt-3.5", "claude-3-haiku", "llama-3-70b", "mixtral-8x7b". + model (str): The model to use: "gpt-3.5", "claude-3-haiku", "llama-3-70b", "mixtral-8x7b". Defaults to "gpt-3.5". Returns: str: The response from the AI. """ models = { - "claude-3-haiku": "claude-3-haiku-20240307", "gpt-3.5": "gpt-3.5-turbo-0125", - "llama-3-70b": "meta-llama/Llama-3-70b-chat-hf", "mixtral-8x7b": "mistralai/Mixtral-8x7B-Instruct-v0.1", } # vqd