diff --git a/README.md b/README.md
index c47a9eb..43d3b98 100755
--- a/README.md
+++ b/README.md
@@ -213,13 +213,13 @@ Exceptions:
 ## 1. chat() - AI chat
 
 ```python
-def chat(self, keywords: str, model: str = "gpt-4o-mini", timeout: int = 30) -> str:
+def chat(self, keywords: str, model: str = "o3-mini", timeout: int = 30) -> str:
     """Initiates a chat session with DuckDuckGo AI.
 
     Args:
         keywords (str): The initial message or question to send to the AI.
-        model (str): The model to use: "gpt-4o-mini", "claude-3-haiku", "llama-3.1-70b", "mixtral-8x7b".
-            Defaults to "gpt-4o-mini".
+        model (str): The model to use: "o3-mini", "gpt-4o-mini", "claude-3-haiku", "llama-3.1-70b", "mixtral-8x7b".
+            Defaults to "o3-mini".
         timeout (int): Timeout value for the HTTP client. Defaults to 30.
 
     Returns:
diff --git a/duckduckgo_search/cli.py b/duckduckgo_search/cli.py
index 7e697ff..45f55c6 100644
--- a/duckduckgo_search/cli.py
+++ b/duckduckgo_search/cli.py
@@ -150,19 +150,20 @@ def version():
     "-m",
     "--model",
     prompt="""DuckDuckGo AI chat. Choose a model:
-[1]: gpt-4o-mini
-[2]: claude-3-haiku
-[3]: llama-3.1-70b
-[4]: mixtral-8x7b
+[1]: o3-mini
+[2]: gpt-4o-mini
+[3]: claude-3-haiku
+[4]: llama-3.1-70b
+[5]: mixtral-8x7b
 """,
-    type=click.Choice(["1", "2", "3", "4"]),
+    type=click.Choice(["1", "2", "3", "4", "5"]),
     show_choices=False,
     default="1",
 )
 def chat(load, proxy, multiline, timeout, verify, model):
     """CLI function to perform an interactive AI chat using DuckDuckGo API."""
     client = DDGS(proxy=_expand_proxy_tb_alias(proxy), verify=verify)
-    model = ["gpt-4o-mini", "claude-3-haiku", "llama-3.1-70b", "mixtral-8x7b"][int(model) - 1]
+    model = ["o3-mini", "gpt-4o-mini", "claude-3-haiku", "llama-3.1-70b", "mixtral-8x7b"][int(model) - 1]
     cache_file = "ddgs_chat_conversation.json"
 
     if load and Path(cache_file).exists():
diff --git a/duckduckgo_search/duckduckgo_search.py b/duckduckgo_search/duckduckgo_search.py
index b1abb9e..3d4d742 100644
--- a/duckduckgo_search/duckduckgo_search.py
+++ b/duckduckgo_search/duckduckgo_search.py
@@ -136,12 +136,12 @@ def _get_vqd(self, keywords: str) -> str:
         resp_content = self._get_url("GET", "https://duckduckgo.com", params={"q": keywords})
         return _extract_vqd(resp_content, keywords)
 
-    def chat(self, keywords: str, model: str = "gpt-4o-mini", timeout: int = 30) -> str:
+    def chat(self, keywords: str, model: str = "o3-mini", timeout: int = 30) -> str:
         """Initiates a chat session with DuckDuckGo AI.
 
         Args:
             keywords (str): The initial message or question to send to the AI.
-            model (str): The model to use: "gpt-4o-mini", "claude-3-haiku", "llama-3.1-70b", "mixtral-8x7b".
-                Defaults to "gpt-4o-mini".
+            model (str): The model to use: "o3-mini", "gpt-4o-mini", "claude-3-haiku", "llama-3.1-70b", "mixtral-8x7b".
+                Defaults to "o3-mini".
             timeout (int): Timeout value for the HTTP client. Defaults to 20.
 
@@ -156,6 +156,7 @@ def chat(self, keywords: str, model: str = "gpt-4o-mini", timeout: int = 30) ->
             logger.info(f"{model=} is deprecated, using {models_deprecated[model]}")
             model = models_deprecated[model]
         models = {
+            "o3-mini": "o3-mini",
             "claude-3-haiku": "claude-3-haiku-20240307",
             "gpt-4o-mini": "gpt-4o-mini",
             "llama-3.1-70b": "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
diff --git a/tests/test_duckduckgo_search.py b/tests/test_duckduckgo_search.py
index 198b930..2f8ce5b 100644
--- a/tests/test_duckduckgo_search.py
+++ b/tests/test_duckduckgo_search.py
@@ -15,7 +15,7 @@ def test_context_manager():
         assert 20 <= len(results) <= 30
 
 
-@pytest.mark.parametrize("model", ["gpt-4o-mini", "claude-3-haiku", "llama-3.1-70b", "mixtral-8x7b"])
+@pytest.mark.parametrize("model", ["o3-mini", "gpt-4o-mini", "claude-3-haiku", "llama-3.1-70b", "mixtral-8x7b"])
 def test_chat(model):
     results = DDGS().chat("cat", model=model)
     assert len(results) >= 1
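For reviewers, here is a minimal usage sketch (not part of the patch) of what the change enables through the Python API; the prompt string is arbitrary, and only `DDGS`, `chat()`, and the model names come from the code above.

```python
from duckduckgo_search import DDGS

# "o3-mini" becomes the default model after this patch, so passing it
# explicitly is optional; any other listed model ("gpt-4o-mini",
# "claude-3-haiku", "llama-3.1-70b", "mixtral-8x7b") can be substituted.
reply = DDGS().chat("What is DuckDuckGo AI chat?", model="o3-mini")
print(reply)  # chat() returns the AI reply as a plain string
```

On the CLI, the same model is now menu choice [1], e.g. `ddgs chat -m 1` (assuming the package's usual `ddgs` entry point).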