From ad7148b7f0036eee4e8dad3420515689d067bf6d Mon Sep 17 00:00:00 2001
From: anpigon
Date: Mon, 27 May 2024 22:13:06 +0900
Subject: [PATCH] feat: Start new chat

commit b8700b59a5fe2d7520d7564383eee4b5019d6958
Author: anpigon
Date:   Mon May 27 22:01:23 2024 +0900

    feat: Add useTransition hook and onStartNewChat functionality

commit a6898f206d2565ecf9527dcc251ec262cada7a29
Author: anpigon
Date:   Mon May 27 21:56:15 2024 +0900

    doc: Add new translations for chatbot in en.json and ko.json

commit 20ef1f8cf19e6f0aaeda8f655df5fa75beee54c1
Author: anpigon
Date:   Mon May 27 21:56:03 2024 +0900

    fix: abort errors
---
 manifest.json                              |  2 +-
 package.json                               |  2 +-
 src/features/chatbot/chatbot.tsx           | 14 +++++++++--
 .../chatbot/components/chatbot-header.tsx  |  9 +++++--
 src/features/chatbot/hooks/use-llm.ts      | 25 ++++++++++---------
 src/locales/en.json                        |  3 ++-
 src/locales/ko.json                        |  3 ++-
 versions.json                              |  3 ++-
 8 files changed, 40 insertions(+), 21 deletions(-)

diff --git a/manifest.json b/manifest.json
index 7c4f62b..b3d3f82 100644
--- a/manifest.json
+++ b/manifest.json
@@ -1,7 +1,7 @@
 {
 	"id": "max-chatbot",
 	"name": "MAX",
-	"version": "0.1.7",
+	"version": "0.2.0",
 	"minAppVersion": "1.5.0",
 	"description": "Generate and brainstorm ideas while creating your notes using Large Language Models (LLMs) from Ollama, LM Studio, Anthropic, OpenAI, Mistral AI, and more for Obsidian.",
 	"author": "anpigon",
diff --git a/package.json b/package.json
index cb72b34..46fa3bc 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
 	"name": "max-chatbot",
-	"version": "0.1.7",
+	"version": "0.2.0",
 	"description": "Generate and brainstorm ideas while creating your notes using Large Language Models (LLMs) from Ollama, LM Studio, Anthropic, OpenAI, Mistral AI, and more for Obsidian.",
 	"main": "main.js",
 	"scripts": {
diff --git a/src/features/chatbot/chatbot.tsx b/src/features/chatbot/chatbot.tsx
index 07ee874..2e04e1f 100644
--- a/src/features/chatbot/chatbot.tsx
+++ b/src/features/chatbot/chatbot.tsx
@@ -2,7 +2,7 @@ import {DEFAULT_SETTINGS} from '@/constants';
 import {useApp, usePlugin, useSettings} from '@/hooks/useApp';
 import {Notice} from 'obsidian';
 import type {ChangeEvent, KeyboardEvent} from 'react';
-import {useEffect, useRef} from 'react';
+import {useEffect, useRef, useTransition} from 'react';
 import {useTranslation} from 'react-i18next';
 import {BotMessage} from './components/bot-message';
 import {ChatBox} from './components/chat-box';
@@ -21,6 +21,7 @@ export const Chatbot: React.FC = () => {
 	const plugin = usePlugin();
 	const settings = useSettings();
 	const {t} = useTranslation('chatbot');
+	const [, startTransition] = useTransition();
 
 	const formRef = useRef(null);
 	const chatBoxRef = useRef(null);
@@ -37,7 +38,7 @@ export const Chatbot: React.FC = () => {
 
 	const defaultSystemPrompt = t('You are a helpful assistant');
 
-	const {messages, isStreaming, controller, setMessage, processMessage} = useLLM({
+	const {messages, setMessages, isStreaming, controller, setMessage, processMessage} = useLLM({
 		provider: currentModel.provider,
 		model: currentModel.model,
 		systemPrompt: defaultSystemPrompt,
@@ -124,6 +125,15 @@ export const Chatbot: React.FC = () => {
 				settings.general.model = newModel;
 				plugin.saveSettings();
 			}}
+			onStartNewChat={() => {
+				controller?.abort();
+				startTransition(() => {
+					setMessages([]);
+					setMessage('');
+					resetInputForm();
+					scrollToBottom();
+				});
+			}}
 		/>
diff --git a/src/features/chatbot/components/chatbot-header.tsx b/src/features/chatbot/components/chatbot-header.tsx
index 961684c..aa14b38 100644
--- a/src/features/chatbot/components/chatbot-header.tsx
+++ b/src/features/chatbot/components/chatbot-header.tsx
@@ -2,6 +2,9 @@ import {Dropdown} from '@/components/form/dropdown';
 import {LLM_PROVIDERS} from '@/constants';
 import {PropsWithChildren} from 'react';
 import {ProviderModels} from '../hooks/use-get-ai-models';
+import {Button} from './button';
+import {IconButton} from '@/components/buttons/icon-button';
+import {t} from 'i18next';
 
 interface ChatbotHeaderProps extends PropsWithChildren {
 	botName: string;
@@ -12,9 +15,10 @@ interface ChatbotHeaderProps extends PropsWithChildren {
 		model: string;
 	};
 	onChangeModel: (provider: LLM_PROVIDERS, modelName: string) => void;
+	onStartNewChat: () => void;
 }
 
-export const ChatbotHeader: React.FC<ChatbotHeaderProps> = ({botName, providers, disabled, currentModel, onChangeModel}) => {
+export const ChatbotHeader: React.FC<ChatbotHeaderProps> = ({botName, providers, disabled, currentModel, onChangeModel, onStartNewChat}) => {
 	const handleChangeModel: React.ChangeEventHandler<HTMLSelectElement> = e => {
 		const value = e.target.value;
 		if (value) {
@@ -24,7 +28,7 @@
 	};
 
 	return (
 [The JSX of this hunk and the next is garbled in this copy: one wrapper line inside the return ( ... ) block around the {botName} heading is replaced, and a later hunk (header context: export const ChatbotHeader: React.FC<ChatbotHeaderProps> = ({botName, providers,) keeps the model Dropdown block ending in "return null; })}" as context and adds a single line after it, the header control that calls onStartNewChat and is labelled via t('Start new chat'), matching the Button/IconButton imports above.]
 	);
 };
diff --git a/src/features/chatbot/hooks/use-llm.ts b/src/features/chatbot/hooks/use-llm.ts
index 74a834d..7c0a20c 100644
--- a/src/features/chatbot/hooks/use-llm.ts
+++ b/src/features/chatbot/hooks/use-llm.ts
@@ -1,16 +1,16 @@
-import { LLM_PROVIDERS } from '@/constants';
-import { usePlugin } from '@/hooks/useApp';
+import {LLM_PROVIDERS} from '@/constants';
+import {usePlugin} from '@/hooks/useApp';
 import useOnceEffect from '@/hooks/useOnceEffect';
-import { ProviderSettings } from '@/types';
+import {ProviderSettings} from '@/types';
 import Logger from '@/utils/logging';
-import { ChatOllama } from '@langchain/community/chat_models/ollama';
-import { BaseLanguageModelInput } from '@langchain/core/language_models/base';
-import { AIMessage, HumanMessage, MessageType, SystemMessage, type BaseMessage } from '@langchain/core/messages';
-import { StringOutputParser } from '@langchain/core/output_parsers';
-import { Runnable, RunnableConfig } from '@langchain/core/runnables';
-import { ChatOpenAI } from '@langchain/openai';
-import { TFile, getFrontMatterInfo } from 'obsidian';
-import { useState, useTransition } from 'react';
+import {ChatOllama} from '@langchain/community/chat_models/ollama';
+import {BaseLanguageModelInput} from '@langchain/core/language_models/base';
+import {AIMessage, HumanMessage, MessageType, SystemMessage, type BaseMessage} from '@langchain/core/messages';
+import {StringOutputParser} from '@langchain/core/output_parsers';
+import {Runnable, RunnableConfig} from '@langchain/core/runnables';
+import {ChatOpenAI} from '@langchain/openai';
+import {TFile, getFrontMatterInfo} from 'obsidian';
+import {useState, useTransition} from 'react';
 
 interface UseLLMProps {
 	provider: LLM_PROVIDERS;
@@ -137,9 +137,10 @@ export const useLLM = ({provider, model, systemPrompt, allowReferenceCurrentNote
 			await handlers?.onMessageAdded?.({...aiMessage, content: response});
 		} catch (error) {
-			if (error instanceof DOMException && error.name === 'AbortError') {
+			if (error instanceof Error && error.message === 'AbortError') {
 				// Request was aborted, do not show the notice
 				setMessages(messages => {
+					if (messages.length === 0) return messages;
 					const latestMessage = messages[messages.length - 1];
 					if (latestMessage.role === 'ai' && latestMessage.content === '') {
 						return [...messages.slice(0, -1), {...latestMessage, content: latestMessage.content + ' (Request aborted)'}];
 					}
diff --git a/src/locales/en.json b/src/locales/en.json
index bf29020..d3aa84e 100644
--- a/src/locales/en.json
+++ b/src/locales/en.json
@@ -2,7 +2,8 @@
 	"chatbot": {
 		"You are a helpful assistant": "You are a helpful assistant.",
 		"Refer to the current note": "Refer to the current note",
-		"What can I help you with?" : "What can I help you with?"
+		"What can I help you with?" : "What can I help you with?",
+		"Start new chat": "Start new chat"
 	},
 	"settings": {
 		"Profiles": "Profiles",
diff --git a/src/locales/ko.json b/src/locales/ko.json
index 9378952..8348f88 100644
--- a/src/locales/ko.json
+++ b/src/locales/ko.json
@@ -2,7 +2,8 @@
 	"chatbot": {
 		"You are a helpful assistant": "친절한 챗봇으로서 상대방의 요청에 최대한 자세하고 친절하게 답하자. 모든 대답은 한국어(Korean)으로 대답해줘.",
 		"Refer to the current note": "현재 노트 참조",
-		"What can I help you with?" : "무엇을 도와드릴까요?"
+		"What can I help you with?" : "무엇을 도와드릴까요?",
+		"Start new chat": "새 채팅 시작"
 	},
 	"settings": {
 		"Profiles": "프로필 설정",
diff --git a/versions.json b/versions.json
index cc58bb6..6730873 100644
--- a/versions.json
+++ b/versions.json
@@ -8,5 +8,6 @@
 	"0.1.4": "1.5.0",
 	"0.1.5": "1.5.0",
 	"0.1.6": "1.5.0",
-	"0.1.7": "1.5.0"
+	"0.1.7": "1.5.0",
+	"0.2.0": "1.5.0"
 }
\ No newline at end of file
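
Side note, not part of the patch: the "feat: Add useTransition hook and onStartNewChat functionality" commit pairs an AbortController.abort() call with a React transition so the reset of the conversation stays responsive while the message list is cleared. The sketch below reduces that flow to its essentials; the useStartNewChat name and the simplified state wiring are illustrative assumptions, not the plugin's actual code.

```ts
import {useState, useTransition} from 'react';

// Minimal sketch (assumed wiring, not the plugin's implementation): clearing the chat is
// wrapped in startTransition so React treats the reset as non-urgent work and keeps the
// input and scroll position responsive while state is cleared.
export function useStartNewChat(controller?: AbortController) {
	const [messages, setMessages] = useState<unknown[]>([]);
	const [message, setMessage] = useState('');
	const [, startTransition] = useTransition();

	const startNewChat = () => {
		// Abort any in-flight streaming request first, so late chunks cannot land in the cleared chat.
		controller?.abort();
		startTransition(() => {
			setMessages([]);
			setMessage('');
		});
	};

	return {messages, message, setMessage, startNewChat};
}
```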
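Also not part of the patch: the "fix: abort errors" commit swaps the DOMException/error.name test for an Error/error.message test in use-llm.ts. Which shape actually reaches the catch block depends on how the LangChain stream surfaces the aborted fetch, so a guard that tolerates both is one defensive option; the isAbortError helper below is only an illustration, not anything defined in the plugin.

```ts
// Illustrative guard (assumption, not the plugin's API): accept both abort-error shapes
// referenced by this patch before deciding to suppress the error notice.
export function isAbortError(error: unknown): boolean {
	if (error instanceof DOMException && error.name === 'AbortError') return true;
	return error instanceof Error && (error.name === 'AbortError' || error.message === 'AbortError');
}

// Possible usage inside the catch block of processMessage:
//   } catch (error) {
//     if (isAbortError(error)) { /* request aborted: stay quiet, tidy the last AI message */ }
//     else { /* show the error notice as before */ }
//   }
```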