From 3d6d504c87751505fbba0bd1857ae36c903607c7 Mon Sep 17 00:00:00 2001 From: n4ze3m Date: Fri, 17 Nov 2023 21:11:40 +0530 Subject: [PATCH] Enhance streaming --- .../bot/playground/handlers/chat.handler.ts | 49 +++++++++---------- 1 file changed, 23 insertions(+), 26 deletions(-) diff --git a/server/src/routes/api/v1/bot/playground/handlers/chat.handler.ts b/server/src/routes/api/v1/bot/playground/handlers/chat.handler.ts index 0a5f4cf4..05d18c93 100644 --- a/server/src/routes/api/v1/bot/playground/handlers/chat.handler.ts +++ b/server/src/routes/api/v1/bot/playground/handlers/chat.handler.ts @@ -6,8 +6,10 @@ import { chatModelProvider } from "../../../../../../utils/models"; import { DialoqbaseHybridRetrival } from "../../../../../../utils/hybrid"; import { BaseRetriever } from "langchain/schema/retriever"; import { Document } from "langchain/document"; -import { createChain, groupMessagesByConversation } from "../../../../../../chain"; - +import { + createChain, + groupMessagesByConversation, +} from "../../../../../../chain"; export const chatRequestHandler = async ( request: FastifyRequest, @@ -140,10 +142,12 @@ export const chatRequestHandler = async ( const botResponse = await chain.invoke({ question: sanitizedQuestion, - chat_history: groupMessagesByConversation(history.map((message) => ({ - type: message.type, - content: message.text, - })),) + chat_history: groupMessagesByConversation( + history.map((message) => ({ + type: message.type, + content: message.text, + })) + ), }); const documents = await documentPromise; @@ -230,10 +234,6 @@ export const chatRequestStreamHandler = async ( const bot_id = request.params.id; const { message, history, history_id } = request.body; - // const history = JSON.parse(chatHistory) as { - // type: string; - // text: string; - // }[]; try { const prisma = request.server.prisma; @@ -351,27 +351,13 @@ export const chatRequestStreamHandler = async ( } } - let response: any = null; + let response: string = ""; const 
streamedModel = chatModelProvider( bot.provider, bot.model, temperature, { streaming: true, - callbacks: [ - { - handleLLMNewToken(token: string) { - return reply.sse({ - id: "", - event: "chunk", - data: JSON.stringify({ - message: token || "", - }), - }); - - }, - }, - ], ...botConfig, } ); @@ -397,7 +383,7 @@ export const chatRequestStreamHandler = async ( retriever, }); - response = await chain.invoke({ + let stream = await chain.stream({ question: sanitizedQuestion, chat_history: groupMessagesByConversation( history.map((message) => ({ @@ -407,6 +393,17 @@ export const chatRequestStreamHandler = async ( ), }); + for await (const token of stream) { + reply.sse({ + id: "", + event: "chunk", + data: JSON.stringify({ + message: token || "", + }), + }); + response += token; + } + let historyId = history_id; const documents = await documentPromise;