From 04fdd1ccfb290e50178c0367eabb2d78121d4b1b Mon Sep 17 00:00:00 2001
From: deedy5 <65482418+deedy5@users.noreply.github.com>
Date: Sun, 16 Feb 2025 19:49:33 +0300
Subject: [PATCH] feat(cli chat): stream answer

---
 duckduckgo_search/cli.py | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/duckduckgo_search/cli.py b/duckduckgo_search/cli.py
index 2954a6d..ced91f4 100644
--- a/duckduckgo_search/cli.py
+++ b/duckduckgo_search/cli.py
@@ -181,16 +181,18 @@ def chat(load, proxy, multiline, timeout, verify, model):
             client._chat_tokens_count = cache.get("tokens", 0)
 
     while True:
-        print(f"{'-' * 78}\nYou[{model=} tokens={client._chat_tokens_count}]: ", end="")
+        click.secho(f"You[{model=} tokens={client._chat_tokens_count}]: ", fg="blue", nl=False)
         if multiline:
-            print(f"""[multiline, send message: ctrl+{"Z" if sys.platform == "win32" else "D"}]""")
+            click.secho(f"""[multiline, send message: ctrl+{"Z" if sys.platform == "win32" else "D"}]""", fg="green")
             user_input = sys.stdin.read()
-            print("...")
+            print()
         else:
             user_input = input()
         if user_input.strip():
-            resp_answer = client.chat(keywords=user_input, model=model, timeout=timeout)
-            click.secho(f"AI: {resp_answer}", fg="green")
+            click.secho("AI: ", fg="red", nl=False)
+            for chunk in client.chat_yield(keywords=user_input, model=model, timeout=timeout):
+                print(chunk, end="")
+            print()
 
             cache = {"vqd": client._chat_vqd, "tokens": client._chat_tokens_count, "messages": client._chat_messages}
             _save_json(cache_file, cache)
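
Note (not part of the patch): a minimal sketch of how the streaming chat introduced here can be consumed outside the CLI, assuming DDGS exposes chat_yield with the keywords/model/timeout parameters used in the hunk above; the prompt and model name below are illustrative, not taken from the patch.

    # Illustrative sketch: print chunks as they arrive, like the patched CLI loop.
    from duckduckgo_search import DDGS

    client = DDGS()
    prompt = "Summarize what streaming output means, in one sentence."  # hypothetical prompt

    print("AI: ", end="", flush=True)
    # chat_yield (as used in the patch) is expected to yield answer fragments
    # incrementally; printing each one immediately produces the streaming effect.
    for chunk in client.chat_yield(keywords=prompt, model="gpt-4o-mini", timeout=30):
        print(chunk, end="", flush=True)
    print()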