diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 1bd1e50..f9eaa27 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -6,7 +6,7 @@ "codespaces": { "openFiles": [ "README.md", - "docs/examples/basics/basic_chat.py", + "docs/examples/features/chat.py", "docs/examples/openai/openai_chat.py" ] }, diff --git a/.gitignore b/.gitignore index 8592da1..c7807b9 100644 --- a/.gitignore +++ b/.gitignore @@ -166,7 +166,6 @@ test-results/ cache/ .DS_STORE -docs/basics.md docs/components.md docs/features.md docs/langchain.md diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index 4571b97..698d3a2 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -37,7 +37,6 @@ On contribution: ```bash hatch run pytest -s -m ui --screenshot on --video on --headed -k -hatch run python scripts/postprocess_videos.py hatch run docs-build git checkout -b git add commit @@ -63,7 +62,7 @@ Please ensure it's installed on your system with pip install hatch ``` -Please ensure [Playwright](https://playwright.dev/python/) browsers are installed +Please ensure [Playwright](https://playwright.dev/python/) browsers are installed. ```bash hatch run playwright install chromium @@ -71,13 +70,13 @@ hatch run playwright install chromium The first time `hatch run ...` is run, it will install the required dependencies. -Please ensure `pre-commit` is installed by running +Please ensure `pre-commit` is installed by running: ```bash hatch run pre-commit install ``` -You will also need to set the below environment variables +You will also need to set the below environment variables if it's not already in your environment. ```bash export OPENAI_API_KEY=... @@ -103,7 +102,7 @@ hatch run test ## Run UI tests -To run the Playwright tests in *headed* mode (i.e. show the browser) you can run +To run the Playwright tests in *headed* mode (i.e. 
show the browser) you can run: ```bash hatch run pytest -s -m ui --headed diff --git a/README.md b/README.md index 873405b..e1c16b0 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,31 @@ Examples using [Panel](https://panel.holoviz.org/) and its [Chat Components](htt https://github.com/holoviz-topics/panel-chat-examples/assets/42288570/cdb78a39-b98c-44e3-886e-29de6a079bde -Panels Chat Components are available from Panel v1.3.0. +Panel's Chat Components are available with `pip install "panel>=1.3.0"`; most examples require `pip install "panel>=1.4.0"`. + +## Quick Start + +It's super easy to get started with Panel chat components. + +1. Set up imports +2. Define a function to dictate what to do with the input message +3. Define a servable widget with `callback=response_callback` + +```python +# 1.) +import panel as pn +pn.extension() + +# 2.) +def response_callback(input_message: str, input_user: str, instance: pn.chat.ChatInterface): + # choose your favorite LLM API to respond to the input_message + ... + response_message = f"Echoing your input: {input_message}" + return response_message + +# 3.) +pn.chat.ChatInterface(callback=response_callback).servable() +``` ## Exploration @@ -21,8 +45,6 @@ To install and serve all examples: ```bash git clone https://github.com/holoviz-topics/panel-chat-examples cd panel-chat-examples -# Optionally create a new virtual environment with conda, venv, etc. -pip install . # Optionally set the OPENAI_API_KEY environment variable panel serve docs/examples/**/*.py --static-dirs thumbnails=docs/assets/thumbnails --autoreload ``` @@ -31,23 +53,6 @@ Then open [http://localhost:5006](http://localhost:5006) in your browser. ![Panel Index Page](https://raw.githubusercontent.com/holoviz-topics/panel-chat-examples/main/assets/images/panel-chat-examples-index-page.png) -### GPU Usage - -Note the default installation is not optimized for GPU usage. To enable GPU support for local -models (i.e. 
not OpenAI), install `ctransformers` with the [proper backend](https://github.com/marella/ctransformers#gpu) and modify the scripts configs' accordingly, e.g. `n_gpu_layers=1` for a single GPU. - -CUDA: - -```bash -pip install ctransformers[cuda] --no-binary ctransformers --no-cache --no-binary ctransformers --force -``` - -Mac M1/2: - -```bash -CT_METAL=1 hatch run pip install ctransformers --no-binary ctransformers --no-cache --no-binary ctransformers --force # for m1 -``` - ## Contributing We would ❤️ to collaborate with you. Check out the [DEVELOPER GUIDE](https://github.com/holoviz-topics/panel-chat-examples/blob/main/DEVELOPER_GUIDE.md) for to get started. diff --git a/docs/applicable_recipes.md b/docs/applicable_recipes.md new file mode 100644 index 0000000..1ee4710 --- /dev/null +++ b/docs/applicable_recipes.md @@ -0,0 +1,665 @@ +# Applicable Recipes +Demonstrates how to use Panel's chat components to achieve specific tasks with popular LLM packages. + +## Langchain Chat With Pdf + +Demonstrates how to use the `ChatInterface` to chat about a PDF using +OpenAI, [LangChain](https://python.langchain.com/docs/get_started/introduction) and +[Chroma](https://docs.trychroma.com/). + + + + + +
+ +Source code for langchain_chat_with_pdf.py + +```python +""" +Demonstrates how to use the `ChatInterface` to chat about a PDF using +OpenAI, [LangChain](https://python.langchain.com/docs/get_started/introduction) and +[Chroma](https://docs.trychroma.com/). +""" + +import os +import tempfile + +import panel as pn +from langchain.chains import RetrievalQA +from langchain.document_loaders import PyPDFLoader +from langchain.embeddings import OpenAIEmbeddings +from langchain.text_splitter import CharacterTextSplitter +from langchain.vectorstores import Chroma +from langchain_community.chat_models import ChatOpenAI + +pn.extension() + + +@pn.cache +def initialize_chain(pdf, k, chain): + # load document + with tempfile.NamedTemporaryFile("wb", delete=False) as f: + f.write(pdf) + + file_name = f.name + loader = PyPDFLoader(file_name) + documents = loader.load() + # split the documents into chunks + text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0) + texts = text_splitter.split_documents(documents) + # select which embeddings we want to use + embeddings = OpenAIEmbeddings() + # create the vectorestore to use as the index + db = Chroma.from_documents(texts, embeddings) + # expose this index in a retriever interface + retriever = db.as_retriever(search_type="similarity", search_kwargs={"k": k}) + # create a chain to answer questions + qa = RetrievalQA.from_chain_type( + llm=ChatOpenAI(), + chain_type=chain, + retriever=retriever, + return_source_documents=True, + verbose=True, + ) + return qa + + +def respond(contents, user, chat_interface): + chat_input.placeholder = "Ask questions here!" 
+ if chat_interface.active == 0: + chat_interface.active = 1 + yield {"user": "OpenAI", "value": "Let's chat about the PDF!"} + + contents.seek(0) + pn.state.cache["pdf"] = contents.read() + return + + qa = initialize_chain(pn.state.cache["pdf"], k_slider.value, chain_select.value) + if key_input.value: + os.environ["OPENAI_API_KEY"] = key_input.value + + response = qa({"query": contents}) + answers = pn.Accordion(("Response", response["result"])) + for doc in response["source_documents"][::-1]: + answers.append((f"Snippet from page {doc.metadata['page']}", doc.page_content)) + answers.active = [0, 1] + yield {"user": "OpenAI", "value": answers} + + +# sidebar widgets +key_input = pn.widgets.PasswordInput( + name="OpenAI Key", + placeholder="sk-...", +) +k_slider = pn.widgets.IntSlider( + name="Number of Relevant Chunks", start=1, end=5, step=1, value=2 +) +chain_select = pn.widgets.RadioButtonGroup( + name="Chain Type", options=["stuff", "map_reduce", "refine", "map_rerank"] +) + +sidebar = pn.Column(key_input, k_slider, chain_select) + +# main widgets +pdf_input = pn.widgets.FileInput(accept=".pdf", value="", height=50) +chat_input = pn.chat.ChatAreaInput(placeholder="First, upload a PDF!") +chat_interface = pn.chat.ChatInterface( + help_text="Please first upload a PDF and click send!", + callback=respond, + sizing_mode="stretch_width", + widgets=[pdf_input, chat_input], + callback_exception="verbose", +) +chat_interface.active = 0 + +# layout +template = pn.template.BootstrapTemplate(sidebar=[sidebar], main=[chat_interface]) +template.servable() +``` +
+ + +## Openai Chat With Hvplot + +We use [OpenAI *Function Calling*](https://platform.openai.com/docs/guides/function-calling) and +[hvPlot](https://hvplot.holoviz.org/) to create an **advanced chatbot** that can create plots. + + + + + +
+ +Source code for openai_chat_with_hvplot.py + +```python +""" +We use [OpenAI *Function Calling*](https://platform.openai.com/docs/guides/function-calling) and +[hvPlot](https://hvplot.holoviz.org/) to create an **advanced chatbot** that can create plots. +""" +import json +from pathlib import Path + +import hvplot.pandas # noqa +import pandas as pd +import panel as pn +from openai import AsyncOpenAI + +ROOT = Path(__file__).parent + +ACCENT = "#00A67E" +THEME = pn.config.theme +CSS_TO_BE_UPSTREAMED_TO_PANEL = """ +a {color: var(--accent-fill-rest) !important;} +a:hover {color: var(--accent-fill-hover) !important;} +div.pn-wrapper{height: calc(100% - 25px)} +#sidebar {padding-left: 5px;background: var(--neutral-fill-active)} +""" + +JSON_THEME = "light" + +MODEL = "gpt-3.5-turbo-1106" +CHAT_GPT_LOGO = "https://upload.wikimedia.org/wikipedia/commons/thumb/0/04/ChatGPT_logo.svg/512px-ChatGPT_logo.svg.png" +CHAT_GPT_URL = "https://chat.openai.com/" +HVPLOT_LOGO = "https://holoviz.org/assets/hvplot.png" +PANEL_LOGO = { + "default": "https://panel.holoviz.org/_static/logo_horizontal_light_theme.png", + "dark": "https://panel.holoviz.org/_static/logo_horizontal_dark_theme.png", +} +PANEL_URL = "https://panel.holoviz.org/index.html" + +pn.chat.message.DEFAULT_AVATARS["assistant"] = HVPLOT_LOGO +pn.chat.ChatMessage.show_reaction_icons = False + + +@pn.cache +def _read_data(): + return pd.read_csv( + "https://raw.githubusercontent.com/kirenz/datasets/master/gapminder.csv" + ) + + +DATA = _read_data() + + +@pn.cache +def _read_tool(name: str) -> dict: + # See https://json-schema.org/learn/glossary + with open(ROOT / f"tool_{name}.json", encoding="utf8") as file: + return json.load(file) + + +TOOLS_MAP = {"hvplot": _read_tool("hvplot"), "renderer": _read_tool("renderer")} +TOOLS = list(TOOLS_MAP.values()) + +HVPLOT_ARGUMENTS = ( + "`" + + "`, `".join(sorted(TOOLS_MAP["hvplot"]["function"]["parameters"]["properties"])) + + "`" +) +EXPLANATION = f""" +## hvPlot by HoloViz 
+--- + +`hvPlot` is a high-level plotting library that that works almost in the same way as \ +the well known `Pandas` `.plot` method. + +The `.hvplot` method supports more data backends, plotting backends and provides more \ +features than the `.plot` method. + +## OpenAI GPT with Tools +--- + +We are using the OpenAI `{MODEL}` model with the `hvplot` and `renderer` *tools*. + +You can refer to the following `hvplot` arguments + +- {HVPLOT_ARGUMENTS} + +and `renderer` arguments + +- `backend` +""" + +SYSTEM_PROMPT = """\ +You are now a **Plotting Assistant** that helps users plot their data using `hvPlot` \ +by `HoloViz`.\ +""" + +DATA_PROMPT = f"""\ +Hi. Here is a description of your `data`. + +The type is `{DATA.__class__.__name__}`. The `dtypes` are + +```bash +{DATA.dtypes} +```""" + +pn.extension(raw_css=[CSS_TO_BE_UPSTREAMED_TO_PANEL]) + +tools_pane = pn.pane.JSON( + object=TOOLS, depth=6, theme=JSON_THEME, name="Tools", sizing_mode="stretch_both" +) +tabs_layout = pn.Tabs( + pn.Column(name="Plot"), + tools_pane, + pn.Column(name="Arguments"), + sizing_mode="stretch_both", + styles={"border-left": "2px solid var(--neutral-fill-active)"}, + dynamic=True, +) + + +def _powered_by(): + """Returns a component describing the frameworks powering the chat ui""" + params = {"height": 50, "sizing_mode": "fixed", "margin": (10, 10)} + return pn.Column( + pn.Row( + pn.pane.Image(CHAT_GPT_LOGO, **params), + pn.pane.Image(HVPLOT_LOGO, **params), + ), + sizing_mode="stretch_width", + ) + + +def _to_code(kwargs): + """Returns the .hvplot code corresponding to the kwargs""" + code = "data.hvplot(" + if kwargs: + code += "\n" + for key, value in kwargs.items(): + code += f" {key}={repr(value)},\n" + code += ")" + return code + + +def _update_tool_kwargs(tool_calls, original_kwargs): + if tool_calls: + for tool_call in tool_calls: + name = tool_call.function.name + kwargs = json.loads(tool_call.function.arguments) + if kwargs: + # the llm does not always specify both the 
hvplot and renderer args + # if not is specified its most natural to assume we continue with the + # same args as before + original_kwargs[name] = kwargs + + +def _clean_tool_kwargs(kwargs): + # Sometimes the llm adds the backend argument to the hvplot arguments + backend = kwargs["hvplot"].pop("backend", None) + if backend and "backend" not in kwargs["renderer"]: + # We add the backend argument to the renderer if none is specified + kwargs["renderer"]["backend"] = backend + # Use responsive by default + if "responsive" not in kwargs: + kwargs["hvplot"]["responsive"] = True + + +client = AsyncOpenAI() +tool_kwargs = {"hvplot": {}, "renderer": {}} + + +async def callback( + contents: str, user: str, instance +): # pylint: disable=unused-argument + """Responds to a task""" + messages = instance.serialize() + response = await client.chat.completions.create( + model=MODEL, + messages=messages, + tools=TOOLS, + tool_choice="auto", + ) + response_message = response.choices[0].message + tool_calls = response_message.tool_calls + + _update_tool_kwargs(tool_calls, tool_kwargs) + _clean_tool_kwargs(tool_kwargs) + code = _to_code(tool_kwargs["hvplot"]) + + response = f"Try running\n```python\n{code}\n```\n" + chat_interface.send(response, user="Assistant", respond=False) + plot = DATA.hvplot(**tool_kwargs["hvplot"]) + pane = pn.pane.HoloViews( + object=plot, sizing_mode="stretch_both", name="Plot", **tool_kwargs["renderer"] + ) + arguments = pn.pane.JSON( + tool_kwargs, + sizing_mode="stretch_both", + depth=3, + theme=JSON_THEME, + name="Arguments", + ) + tabs_layout[:] = [pane, tools_pane, arguments] + + +chat_interface = pn.chat.ChatInterface( + callback=callback, + show_rerun=False, + show_undo=False, + show_clear=False, + callback_exception="verbose", +) +chat_interface.send( + SYSTEM_PROMPT, + user="System", + respond=False, +) +chat_interface.send( + DATA_PROMPT, + user="Assistant", + respond=False, +) + + +component = pn.Row(chat_interface, tabs_layout, 
sizing_mode="stretch_both") + +pn.template.FastListTemplate( + title="Chat with hvPlot", + sidebar=[ + _powered_by(), + EXPLANATION, + ], + main=[component], + main_layout=None, + accent=ACCENT, +).servable() +``` +
+ + +## Openai Two Bots + +Demonstrates how to use the `ChatInterface` to create two bots that chat with each +other. + +Highlights: + +- The user decides the callback user and avatar for the response. +- A system message is used to control the conversation flow. + + + + + +
+ +Source code for openai_two_bots.py + +```python +""" +Demonstrates how to use the `ChatInterface` to create two bots that chat with each +other. + +Highlights: + +- The user decides the callback user and avatar for the response. +- A system message is used to control the conversation flow. +""" + +import panel as pn +from openai import AsyncOpenAI + +pn.extension() + + +async def callback( + contents: str, + user: str, + instance: pn.chat.ChatInterface, +): + if user in ["User", "Happy Bot"]: + callback_user = "Nerd Bot" + callback_avatar = "🤓" + elif user == "Nerd Bot": + callback_user = "Happy Bot" + callback_avatar = "😃" + + if len(instance.objects) % 6 == 0: # stop at every 6 messages + instance.send( + "That's it for now! Thanks for chatting!", user="System", respond=False + ) + return + + prompt = f"Reply profoundly about '{contents}', then follow up with a question." + messages = [{"role": "user", "content": prompt}] + response = await aclient.chat.completions.create( + model="gpt-3.5-turbo", + messages=messages, + stream=True, + max_tokens=250, + temperature=0.1, + ) + + message = "" + async for chunk in response: + part = chunk.choices[0].delta.content + if part is not None: + message += part + yield {"user": callback_user, "avatar": callback_avatar, "object": message} + + instance.respond() + + +aclient = AsyncOpenAI() +chat_interface = pn.chat.ChatInterface( + callback=callback, + help_text="Enter a topic for the bots to discuss! Beware the token usage!", +) +chat_interface.servable() +``` +
+ + +## Langchain Chat With Pandas + +Demonstrates how to use the `ChatInterface` and `PanelCallbackHandler` to create a +chatbot to talk to your Pandas DataFrame. This is heavily inspired by the +[LangChain `chat_pandas_df` Reference Example](https://github.com/langchain-ai/streamlit-agent/blob/main/streamlit_agent/chat_pandas_df.py). + + + + + +
+ +Source code for langchain_chat_with_pandas.py + +```python +""" +Demonstrates how to use the `ChatInterface` and `PanelCallbackHandler` to create a +chatbot to talk to your Pandas DataFrame. This is heavily inspired by the +[LangChain `chat_pandas_df` Reference Example](https://github.com/langchain-ai/streamlit-agent/blob/main/streamlit_agent/chat_pandas_df.py). +""" + +from __future__ import annotations + +from pathlib import Path +from textwrap import dedent + +import pandas as pd +import panel as pn +import param +import requests +from langchain.agents import AgentType +from langchain.chat_models import ChatOpenAI +from langchain_experimental.agents.agent_toolkits import create_pandas_dataframe_agent + +pn.extension("perspective") + +PENGUINS_URL = ( + "https://raw.githubusercontent.com/mwaskom/seaborn-data/master/penguins.csv" +) +PENGUINS_PATH = Path(__file__).parent / "penguins.csv" +if not PENGUINS_PATH.exists(): + response = requests.get(PENGUINS_URL) + PENGUINS_PATH.write_text(response.text) + +FILE_DOWNLOAD_STYLE = """ +.bk-btn a { + padding: 0px; +} +.bk-btn-group > button, .bk-input-group > button { + font-size: small; +} +""" + + +class AgentConfig(param.Parameterized): + """Configuration used for the Pandas Agent""" + + user = param.String("Pandas Agent") + avatar = param.String("🐼") + + show_chain_of_thought = param.Boolean(default=False) + + def _get_agent_message(self, message: str) -> pn.chat.ChatMessage: + return pn.chat.ChatMessage(message, user=self.user, avatar=self.avatar) + + +class AppState(param.Parameterized): + data = param.DataFrame() + + llm = param.Parameter(constant=True) + pandas_df_agent = param.Parameter(constant=True) + + config: AgentConfig = param.ClassSelector(class_=AgentConfig) + + def __init__(self, config: AgentConfig | None = None): + if not config: + config = AgentConfig() + + super().__init__(config=config) + with param.edit_constant(self): + self.llm = ChatOpenAI( + temperature=0, + model="gpt-3.5-turbo-0613", + 
streaming=True, + ) + + @param.depends("llm", "data", on_init=True, watch=True) + def _reset_pandas_df_agent(self): + with param.edit_constant(self): + if not self.error_message: + self.pandas_df_agent = create_pandas_dataframe_agent( + self.llm, + self.data, + verbose=True, + agent_type=AgentType.OPENAI_FUNCTIONS, + handle_parsing_errors=True, + ) + else: + self.pandas_df_agent = None + + @property + def error_message(self): + if not self.llm and self.data is None: + return "Please **upload a `.csv` file** and click the **send** button." + if self.data is None: + return "Please **upload a `.csv` file** and click the **send** button." + return "" + + @property + def welcome_message(self): + return dedent( + f""" + I'm your LangChain Pandas DataFrame Agent. + + I execute LLM generated Python code under the hood - this can be bad if + the `llm` generated Python code is harmful. Use cautiously! + + {self.error_message}""" + ).strip() + + async def callback(self, contents, user, instance): + if isinstance(contents, pd.DataFrame): + self.data = contents + instance.active = 1 + message = self.config._get_agent_message( + "You can ask me anything about the data. 
For example " + "'how many species are there?'" + ) + return message + + if self.error_message: + message = self.config._get_agent_message(self.error_message) + return message + + if self.config.show_chain_of_thought: + langchain_callbacks = [ + pn.chat.langchain.PanelCallbackHandler(instance=instance) + ] + else: + langchain_callbacks = [] + + response = await self.pandas_df_agent.arun( + contents, callbacks=langchain_callbacks + ) + message = self.config._get_agent_message(response) + return message + + +state = AppState() + +chat_interface = pn.chat.ChatInterface( + widgets=[ + pn.widgets.FileInput(name="Upload", accept=".csv"), + pn.chat.ChatAreaInput(name="Message", placeholder="Send a message"), + ], + renderers=pn.pane.Perspective, + callback=state.callback, + callback_exception="verbose", + show_rerun=False, + show_undo=False, + show_clear=False, + min_height=400, +) +chat_interface.send( + state.welcome_message, + user=state.config.user, + avatar=state.config.avatar, + respond=False, +) + +download_button = pn.widgets.FileDownload( + PENGUINS_PATH, + button_type="primary", + button_style="outline", + height=30, + width=335, + stylesheets=[FILE_DOWNLOAD_STYLE], +) + +layout = pn.template.MaterialTemplate( + title="🦜 LangChain - Chat with Pandas DataFrame", + main=[chat_interface], + sidebar=[ + download_button, + "#### Agent Settings", + state.config.param.show_chain_of_thought, + ], +) + +layout.servable() +``` +
diff --git a/docs/assets/thumbnails/basic_chat.png b/docs/assets/thumbnails/basic_chat.png deleted file mode 100644 index ac44458..0000000 Binary files a/docs/assets/thumbnails/basic_chat.png and /dev/null differ diff --git a/docs/assets/thumbnails/basic_custom_widgets.png b/docs/assets/thumbnails/basic_custom_widgets.png deleted file mode 100644 index 2bface4..0000000 Binary files a/docs/assets/thumbnails/basic_custom_widgets.png and /dev/null differ diff --git a/docs/assets/thumbnails/basic_streaming_chat.png b/docs/assets/thumbnails/basic_streaming_chat.png deleted file mode 100644 index ac44458..0000000 Binary files a/docs/assets/thumbnails/basic_streaming_chat.png and /dev/null differ diff --git a/docs/assets/thumbnails/basic_streaming_chat_async.png b/docs/assets/thumbnails/basic_streaming_chat_async.png deleted file mode 100644 index ac44458..0000000 Binary files a/docs/assets/thumbnails/basic_streaming_chat_async.png and /dev/null differ diff --git a/docs/assets/thumbnails/chained_response.png b/docs/assets/thumbnails/chained_response.png new file mode 100644 index 0000000..97465b8 Binary files /dev/null and b/docs/assets/thumbnails/chained_response.png differ diff --git a/docs/assets/thumbnails/component_chat_input.png b/docs/assets/thumbnails/component_chat_input.png deleted file mode 100644 index 40f4064..0000000 Binary files a/docs/assets/thumbnails/component_chat_input.png and /dev/null differ diff --git a/docs/assets/thumbnails/component_environment_widget.png b/docs/assets/thumbnails/component_environment_widget.png deleted file mode 100644 index 91cb81e..0000000 Binary files a/docs/assets/thumbnails/component_environment_widget.png and /dev/null differ diff --git a/docs/assets/thumbnails/component_status.png b/docs/assets/thumbnails/component_status.png deleted file mode 100644 index c411390..0000000 Binary files a/docs/assets/thumbnails/component_status.png and /dev/null differ diff --git a/docs/assets/thumbnails/control_callback_response.png 
b/docs/assets/thumbnails/control_callback_response.png new file mode 100644 index 0000000..016ca6d Binary files /dev/null and b/docs/assets/thumbnails/control_callback_response.png differ diff --git a/docs/assets/thumbnails/custom_input_widgets.png b/docs/assets/thumbnails/custom_input_widgets.png new file mode 100644 index 0000000..486b724 Binary files /dev/null and b/docs/assets/thumbnails/custom_input_widgets.png differ diff --git a/docs/assets/thumbnails/delayed_placeholder.png b/docs/assets/thumbnails/delayed_placeholder.png new file mode 100644 index 0000000..3b179fd Binary files /dev/null and b/docs/assets/thumbnails/delayed_placeholder.png differ diff --git a/docs/assets/thumbnails/echo_chat.png b/docs/assets/thumbnails/echo_chat.png new file mode 100644 index 0000000..31f9592 Binary files /dev/null and b/docs/assets/thumbnails/echo_chat.png differ diff --git a/docs/assets/thumbnails/feature_chained_response.png b/docs/assets/thumbnails/feature_chained_response.png deleted file mode 100644 index 300be46..0000000 Binary files a/docs/assets/thumbnails/feature_chained_response.png and /dev/null differ diff --git a/docs/assets/thumbnails/feature_delayed_placeholder.png b/docs/assets/thumbnails/feature_delayed_placeholder.png deleted file mode 100644 index 2fd36f3..0000000 Binary files a/docs/assets/thumbnails/feature_delayed_placeholder.png and /dev/null differ diff --git a/docs/assets/thumbnails/feature_replace_response.png b/docs/assets/thumbnails/feature_replace_response.png deleted file mode 100644 index b8ff2a3..0000000 Binary files a/docs/assets/thumbnails/feature_replace_response.png and /dev/null differ diff --git a/docs/assets/thumbnails/feature_slim_interface.png b/docs/assets/thumbnails/feature_slim_interface.png deleted file mode 100644 index 3a75ff8..0000000 Binary files a/docs/assets/thumbnails/feature_slim_interface.png and /dev/null differ diff --git a/docs/assets/thumbnails/langchain.png b/docs/assets/thumbnails/langchain.png new file mode 
100644 index 0000000..14fe3db Binary files /dev/null and b/docs/assets/thumbnails/langchain.png differ diff --git a/docs/assets/thumbnails/langchain_chat_pandas_df.png b/docs/assets/thumbnails/langchain_chat_pandas_df.png deleted file mode 100644 index ca7a709..0000000 Binary files a/docs/assets/thumbnails/langchain_chat_pandas_df.png and /dev/null differ diff --git a/docs/assets/thumbnails/langchain_chat_with_pandas.png b/docs/assets/thumbnails/langchain_chat_with_pandas.png new file mode 100644 index 0000000..0fc647c Binary files /dev/null and b/docs/assets/thumbnails/langchain_chat_with_pandas.png differ diff --git a/docs/assets/thumbnails/langchain_chat_with_pdf.png b/docs/assets/thumbnails/langchain_chat_with_pdf.png new file mode 100644 index 0000000..eb9cb3e Binary files /dev/null and b/docs/assets/thumbnails/langchain_chat_with_pdf.png differ diff --git a/docs/assets/thumbnails/langchain_lcel.png b/docs/assets/thumbnails/langchain_lcel.png deleted file mode 100644 index f74a6bb..0000000 Binary files a/docs/assets/thumbnails/langchain_lcel.png and /dev/null differ diff --git a/docs/assets/thumbnails/langchain_llama_and_mistral.png b/docs/assets/thumbnails/langchain_llama_and_mistral.png deleted file mode 100644 index d40f0fe..0000000 Binary files a/docs/assets/thumbnails/langchain_llama_and_mistral.png and /dev/null differ diff --git a/docs/assets/thumbnails/langchain_math_assistant.png b/docs/assets/thumbnails/langchain_math_assistant.png deleted file mode 100644 index df9b2ec..0000000 Binary files a/docs/assets/thumbnails/langchain_math_assistant.png and /dev/null differ diff --git a/docs/assets/thumbnails/langchain_pdf_assistant.png b/docs/assets/thumbnails/langchain_pdf_assistant.png deleted file mode 100644 index 956b3a4..0000000 Binary files a/docs/assets/thumbnails/langchain_pdf_assistant.png and /dev/null differ diff --git a/docs/assets/thumbnails/langchain_streaming_lcel_with_memory.png 
b/docs/assets/thumbnails/langchain_streaming_lcel_with_memory.png deleted file mode 100644 index 5fd5266..0000000 Binary files a/docs/assets/thumbnails/langchain_streaming_lcel_with_memory.png and /dev/null differ diff --git a/docs/assets/thumbnails/langchain_with_memory.png b/docs/assets/thumbnails/langchain_with_memory.png deleted file mode 100644 index 274e053..0000000 Binary files a/docs/assets/thumbnails/langchain_with_memory.png and /dev/null differ diff --git a/docs/assets/thumbnails/llama_index_talk_to_github.png b/docs/assets/thumbnails/llama_index_talk_to_github.png deleted file mode 100644 index 122a981..0000000 Binary files a/docs/assets/thumbnails/llama_index_talk_to_github.png and /dev/null differ diff --git a/docs/assets/thumbnails/llamacpp.png b/docs/assets/thumbnails/llamacpp.png new file mode 100644 index 0000000..2638983 Binary files /dev/null and b/docs/assets/thumbnails/llamacpp.png differ diff --git a/docs/assets/thumbnails/mistral_and_llama.png b/docs/assets/thumbnails/mistral_and_llama.png deleted file mode 100644 index 7e78e08..0000000 Binary files a/docs/assets/thumbnails/mistral_and_llama.png and /dev/null differ diff --git a/docs/assets/thumbnails/mistral_api_chat.png b/docs/assets/thumbnails/mistral_api_chat.png deleted file mode 100644 index 7ab42a4..0000000 Binary files a/docs/assets/thumbnails/mistral_api_chat.png and /dev/null differ diff --git a/docs/assets/thumbnails/mistral_chat.png b/docs/assets/thumbnails/mistral_chat.png deleted file mode 100644 index 20fde28..0000000 Binary files a/docs/assets/thumbnails/mistral_chat.png and /dev/null differ diff --git a/docs/assets/thumbnails/mistral_with_memory.png b/docs/assets/thumbnails/mistral_with_memory.png deleted file mode 100644 index 241f727..0000000 Binary files a/docs/assets/thumbnails/mistral_with_memory.png and /dev/null differ diff --git a/docs/assets/thumbnails/mistralai.png b/docs/assets/thumbnails/mistralai.png new file mode 100644 index 0000000..68c7d75 Binary files 
/dev/null and b/docs/assets/thumbnails/mistralai.png differ diff --git a/docs/assets/thumbnails/openai.png b/docs/assets/thumbnails/openai.png new file mode 100644 index 0000000..3b13605 Binary files /dev/null and b/docs/assets/thumbnails/openai.png differ diff --git a/docs/assets/thumbnails/openai_async_chat.png b/docs/assets/thumbnails/openai_async_chat.png deleted file mode 100644 index 4d60540..0000000 Binary files a/docs/assets/thumbnails/openai_async_chat.png and /dev/null differ diff --git a/docs/assets/thumbnails/openai_authentication.png b/docs/assets/thumbnails/openai_authentication.png deleted file mode 100644 index f865c08..0000000 Binary files a/docs/assets/thumbnails/openai_authentication.png and /dev/null differ diff --git a/docs/assets/thumbnails/openai_chat.png b/docs/assets/thumbnails/openai_chat.png deleted file mode 100644 index 8b36178..0000000 Binary files a/docs/assets/thumbnails/openai_chat.png and /dev/null differ diff --git a/docs/assets/thumbnails/openai_chat_with_hvplot.png b/docs/assets/thumbnails/openai_chat_with_hvplot.png index 3427d2c..6368f03 100644 Binary files a/docs/assets/thumbnails/openai_chat_with_hvplot.png and b/docs/assets/thumbnails/openai_chat_with_hvplot.png differ diff --git a/docs/assets/thumbnails/openai_hvplot.png b/docs/assets/thumbnails/openai_hvplot.png deleted file mode 100644 index 30282b6..0000000 Binary files a/docs/assets/thumbnails/openai_hvplot.png and /dev/null differ diff --git a/docs/assets/thumbnails/openai_image_generation.png b/docs/assets/thumbnails/openai_image_generation.png deleted file mode 100644 index 3dd6f2f..0000000 Binary files a/docs/assets/thumbnails/openai_image_generation.png and /dev/null differ diff --git a/docs/assets/thumbnails/openai_two_bots.png b/docs/assets/thumbnails/openai_two_bots.png index f38a51c..33e6aa1 100644 Binary files a/docs/assets/thumbnails/openai_two_bots.png and b/docs/assets/thumbnails/openai_two_bots.png differ diff --git 
a/docs/assets/thumbnails/openai_with_memory.png b/docs/assets/thumbnails/openai_with_memory.png deleted file mode 100644 index 4027195..0000000 Binary files a/docs/assets/thumbnails/openai_with_memory.png and /dev/null differ diff --git a/docs/assets/thumbnails/recipes_openai_chat_with_hvplot.png b/docs/assets/thumbnails/recipes_openai_chat_with_hvplot.png new file mode 100644 index 0000000..4b9e15f Binary files /dev/null and b/docs/assets/thumbnails/recipes_openai_chat_with_hvplot.png differ diff --git a/docs/assets/thumbnails/stream_echo_chat.png b/docs/assets/thumbnails/stream_echo_chat.png new file mode 100644 index 0000000..4209e6c Binary files /dev/null and b/docs/assets/thumbnails/stream_echo_chat.png differ diff --git a/docs/assets/thumbnails/styled_slim_interface.png b/docs/assets/thumbnails/styled_slim_interface.png new file mode 100644 index 0000000..38838c6 Binary files /dev/null and b/docs/assets/thumbnails/styled_slim_interface.png differ diff --git a/docs/assets/videos/basic_chat.mp4 b/docs/assets/videos/basic_chat.mp4 deleted file mode 100644 index 1b2cc40..0000000 Binary files a/docs/assets/videos/basic_chat.mp4 and /dev/null differ diff --git a/docs/assets/videos/basic_custom_widgets.mp4 b/docs/assets/videos/basic_custom_widgets.mp4 deleted file mode 100644 index 5b16532..0000000 Binary files a/docs/assets/videos/basic_custom_widgets.mp4 and /dev/null differ diff --git a/docs/assets/videos/basic_streaming_chat.mp4 b/docs/assets/videos/basic_streaming_chat.mp4 deleted file mode 100644 index 2cdc31e..0000000 Binary files a/docs/assets/videos/basic_streaming_chat.mp4 and /dev/null differ diff --git a/docs/assets/videos/basic_streaming_chat_async.mp4 b/docs/assets/videos/basic_streaming_chat_async.mp4 deleted file mode 100644 index 8cba2a6..0000000 Binary files a/docs/assets/videos/basic_streaming_chat_async.mp4 and /dev/null differ diff --git a/docs/assets/videos/chained_response.mp4 b/docs/assets/videos/chained_response.mp4 new file mode 100644 
index 0000000..2982a6f Binary files /dev/null and b/docs/assets/videos/chained_response.mp4 differ diff --git a/docs/assets/videos/component_chat_input.mp4 b/docs/assets/videos/component_chat_input.mp4 deleted file mode 100644 index 23d366d..0000000 Binary files a/docs/assets/videos/component_chat_input.mp4 and /dev/null differ diff --git a/docs/assets/videos/component_environment_widget.mp4 b/docs/assets/videos/component_environment_widget.mp4 deleted file mode 100644 index 48dd9bb..0000000 Binary files a/docs/assets/videos/component_environment_widget.mp4 and /dev/null differ diff --git a/docs/assets/videos/component_status.mp4 b/docs/assets/videos/component_status.mp4 deleted file mode 100644 index c9ae399..0000000 Binary files a/docs/assets/videos/component_status.mp4 and /dev/null differ diff --git a/docs/assets/videos/control_callback_response.mp4 b/docs/assets/videos/control_callback_response.mp4 new file mode 100644 index 0000000..46b2cb4 Binary files /dev/null and b/docs/assets/videos/control_callback_response.mp4 differ diff --git a/docs/assets/videos/custom_input_widgets.mp4 b/docs/assets/videos/custom_input_widgets.mp4 new file mode 100644 index 0000000..56df8d4 Binary files /dev/null and b/docs/assets/videos/custom_input_widgets.mp4 differ diff --git a/docs/assets/videos/delayed_placeholder.mp4 b/docs/assets/videos/delayed_placeholder.mp4 new file mode 100644 index 0000000..b1a641f Binary files /dev/null and b/docs/assets/videos/delayed_placeholder.mp4 differ diff --git a/docs/assets/videos/echo_chat.mp4 b/docs/assets/videos/echo_chat.mp4 new file mode 100644 index 0000000..060e139 Binary files /dev/null and b/docs/assets/videos/echo_chat.mp4 differ diff --git a/docs/assets/videos/feature_chained_response.mp4 b/docs/assets/videos/feature_chained_response.mp4 deleted file mode 100644 index 6600cf0..0000000 Binary files a/docs/assets/videos/feature_chained_response.mp4 and /dev/null differ diff --git a/docs/assets/videos/feature_delayed_placeholder.mp4 
b/docs/assets/videos/feature_delayed_placeholder.mp4 deleted file mode 100644 index 4cb1cc7..0000000 Binary files a/docs/assets/videos/feature_delayed_placeholder.mp4 and /dev/null differ diff --git a/docs/assets/videos/feature_replace_response.mp4 b/docs/assets/videos/feature_replace_response.mp4 deleted file mode 100644 index a63a41b..0000000 Binary files a/docs/assets/videos/feature_replace_response.mp4 and /dev/null differ diff --git a/docs/assets/videos/feature_slim_interface.mp4 b/docs/assets/videos/feature_slim_interface.mp4 deleted file mode 100644 index 2202f63..0000000 Binary files a/docs/assets/videos/feature_slim_interface.mp4 and /dev/null differ diff --git a/docs/assets/videos/langchain.mp4 b/docs/assets/videos/langchain.mp4 new file mode 100644 index 0000000..f98b131 Binary files /dev/null and b/docs/assets/videos/langchain.mp4 differ diff --git a/docs/assets/videos/langchain_chat_pandas_df.mp4 b/docs/assets/videos/langchain_chat_pandas_df.mp4 deleted file mode 100644 index a6536cc..0000000 Binary files a/docs/assets/videos/langchain_chat_pandas_df.mp4 and /dev/null differ diff --git a/docs/assets/videos/langchain_chat_with_pandas.mp4 b/docs/assets/videos/langchain_chat_with_pandas.mp4 new file mode 100644 index 0000000..4d9df8e Binary files /dev/null and b/docs/assets/videos/langchain_chat_with_pandas.mp4 differ diff --git a/docs/assets/videos/langchain_chat_with_pdf.mp4 b/docs/assets/videos/langchain_chat_with_pdf.mp4 new file mode 100644 index 0000000..8005596 Binary files /dev/null and b/docs/assets/videos/langchain_chat_with_pdf.mp4 differ diff --git a/docs/assets/videos/langchain_lcel.mp4 b/docs/assets/videos/langchain_lcel.mp4 deleted file mode 100644 index 936faab..0000000 Binary files a/docs/assets/videos/langchain_lcel.mp4 and /dev/null differ diff --git a/docs/assets/videos/langchain_llama_and_mistral.mp4 b/docs/assets/videos/langchain_llama_and_mistral.mp4 deleted file mode 100644 index 3f8b88b..0000000 Binary files 
a/docs/assets/videos/langchain_llama_and_mistral.mp4 and /dev/null differ diff --git a/docs/assets/videos/langchain_math_assistant.mp4 b/docs/assets/videos/langchain_math_assistant.mp4 deleted file mode 100644 index d3a97f8..0000000 Binary files a/docs/assets/videos/langchain_math_assistant.mp4 and /dev/null differ diff --git a/docs/assets/videos/langchain_pdf_assistant.mp4 b/docs/assets/videos/langchain_pdf_assistant.mp4 deleted file mode 100644 index e5f5eff..0000000 Binary files a/docs/assets/videos/langchain_pdf_assistant.mp4 and /dev/null differ diff --git a/docs/assets/videos/langchain_streaming_lcel_with_memory.mp4 b/docs/assets/videos/langchain_streaming_lcel_with_memory.mp4 deleted file mode 100644 index 613433a..0000000 Binary files a/docs/assets/videos/langchain_streaming_lcel_with_memory.mp4 and /dev/null differ diff --git a/docs/assets/videos/langchain_with_memory.mp4 b/docs/assets/videos/langchain_with_memory.mp4 deleted file mode 100644 index 02d9767..0000000 Binary files a/docs/assets/videos/langchain_with_memory.mp4 and /dev/null differ diff --git a/docs/assets/videos/llama_index_talk_to_github.mp4 b/docs/assets/videos/llama_index_talk_to_github.mp4 deleted file mode 100644 index 8038fb6..0000000 Binary files a/docs/assets/videos/llama_index_talk_to_github.mp4 and /dev/null differ diff --git a/docs/assets/videos/llamacpp.mp4 b/docs/assets/videos/llamacpp.mp4 new file mode 100644 index 0000000..b90c6ee Binary files /dev/null and b/docs/assets/videos/llamacpp.mp4 differ diff --git a/docs/assets/videos/mistral_and_llama.mp4 b/docs/assets/videos/mistral_and_llama.mp4 deleted file mode 100644 index 2c14ec9..0000000 Binary files a/docs/assets/videos/mistral_and_llama.mp4 and /dev/null differ diff --git a/docs/assets/videos/mistral_api_chat.mp4 b/docs/assets/videos/mistral_api_chat.mp4 deleted file mode 100644 index 3b61c3f..0000000 Binary files a/docs/assets/videos/mistral_api_chat.mp4 and /dev/null differ diff --git a/docs/assets/videos/mistral_chat.mp4 
b/docs/assets/videos/mistral_chat.mp4 deleted file mode 100644 index 42ec455..0000000 Binary files a/docs/assets/videos/mistral_chat.mp4 and /dev/null differ diff --git a/docs/assets/videos/mistral_with_memory.mp4 b/docs/assets/videos/mistral_with_memory.mp4 deleted file mode 100644 index ae4b5c6..0000000 Binary files a/docs/assets/videos/mistral_with_memory.mp4 and /dev/null differ diff --git a/docs/assets/videos/mistralai.mp4 b/docs/assets/videos/mistralai.mp4 new file mode 100644 index 0000000..77a3bb0 Binary files /dev/null and b/docs/assets/videos/mistralai.mp4 differ diff --git a/docs/assets/videos/openai.mp4 b/docs/assets/videos/openai.mp4 new file mode 100644 index 0000000..9431476 Binary files /dev/null and b/docs/assets/videos/openai.mp4 differ diff --git a/docs/assets/videos/openai_async_chat.mp4 b/docs/assets/videos/openai_async_chat.mp4 deleted file mode 100644 index b30cae3..0000000 Binary files a/docs/assets/videos/openai_async_chat.mp4 and /dev/null differ diff --git a/docs/assets/videos/openai_authentication.mp4 b/docs/assets/videos/openai_authentication.mp4 deleted file mode 100644 index d161210..0000000 Binary files a/docs/assets/videos/openai_authentication.mp4 and /dev/null differ diff --git a/docs/assets/videos/openai_chat.mp4 b/docs/assets/videos/openai_chat.mp4 deleted file mode 100644 index 2bd0c64..0000000 Binary files a/docs/assets/videos/openai_chat.mp4 and /dev/null differ diff --git a/docs/assets/videos/openai_chat_with_hvplot.mp4 b/docs/assets/videos/openai_chat_with_hvplot.mp4 index 3e74be6..b6d735a 100644 Binary files a/docs/assets/videos/openai_chat_with_hvplot.mp4 and b/docs/assets/videos/openai_chat_with_hvplot.mp4 differ diff --git a/docs/assets/videos/openai_hvplot.mp4 b/docs/assets/videos/openai_hvplot.mp4 deleted file mode 100644 index 8f6903a..0000000 Binary files a/docs/assets/videos/openai_hvplot.mp4 and /dev/null differ diff --git a/docs/assets/videos/openai_image_generation.mp4 
b/docs/assets/videos/openai_image_generation.mp4 deleted file mode 100644 index eff62ba..0000000 Binary files a/docs/assets/videos/openai_image_generation.mp4 and /dev/null differ diff --git a/docs/assets/videos/openai_two_bots.mp4 b/docs/assets/videos/openai_two_bots.mp4 index aac8ea2..a562c6b 100644 Binary files a/docs/assets/videos/openai_two_bots.mp4 and b/docs/assets/videos/openai_two_bots.mp4 differ diff --git a/docs/assets/videos/openai_with_memory.mp4 b/docs/assets/videos/openai_with_memory.mp4 deleted file mode 100644 index 6e4f8b6..0000000 Binary files a/docs/assets/videos/openai_with_memory.mp4 and /dev/null differ diff --git a/docs/assets/videos/panel-chat-examples-splash.mp4 b/docs/assets/videos/panel-chat-examples-splash.mp4 deleted file mode 100644 index d90cc1b..0000000 Binary files a/docs/assets/videos/panel-chat-examples-splash.mp4 and /dev/null differ diff --git a/docs/assets/videos/stream_echo_chat.mp4 b/docs/assets/videos/stream_echo_chat.mp4 new file mode 100644 index 0000000..4409c15 Binary files /dev/null and b/docs/assets/videos/stream_echo_chat.mp4 differ diff --git a/docs/assets/videos/styled_slim_interface.mp4 b/docs/assets/videos/styled_slim_interface.mp4 new file mode 100644 index 0000000..d5d73a7 Binary files /dev/null and b/docs/assets/videos/styled_slim_interface.mp4 differ diff --git a/docs/chat_features.md b/docs/chat_features.md new file mode 100644 index 0000000..be7a83b --- /dev/null +++ b/docs/chat_features.md @@ -0,0 +1,512 @@ +# Chat Features +Highlights some features of Panel's chat components; they do not require other packages besides Panel. + +## Echo Chat + +Demonstrates how to use the `ChatInterface` and a `callback` function to respond. + +Highlights: + +- The `ChatInterface` and a `callback` function are used to create a +chatbot that echoes back the message entered by the User. +- The `help_text` parameter is used to provide instructions to the User. + + + + + +
+ +Source code for echo_chat.py + +```python +""" +Demonstrates how to use the `ChatInterface` and a `callback` function to respond. + +Highlights: + +- The `ChatInterface` and a `callback` function are used to create a + chatbot that echoes back the message entered by the User. +- The `help_text` parameter is used to provide instructions to the User. +""" + +import panel as pn + +pn.extension() + + +def callback(contents: str, user: str, instance: pn.chat.ChatInterface): + message = f"Echoing {user}: {contents}" + return message + + +chat_interface = pn.chat.ChatInterface( + callback=callback, + help_text="Enter a message in the TextInput below and receive an echo!", +) +chat_interface.servable() +``` +
+ +Live Apps: Pyodide + +## Stream Echo Chat + +Demonstrates how to use the `ChatInterface` and a `callback` function to +stream back responses. + +The chatbot Assistant echoes back the message entered by the User in an +*async streaming* fashion. + +Highlights: + +- The function is defined as `async` and uses `yield` to stream back responses. +- Initialize `message` first to gather the characters and then `yield` it; +without it, only one letter would be displayed at a time. + + + + + +
+ +Source code for stream_echo_chat.py + +```python +""" +Demonstrates how to use the `ChatInterface` and a `callback` function to +stream back responses. + +The chatbot Assistant echoes back the message entered by the User in an +*async streaming* fashion. + +Highlights: + +- The function is defined as `async` and uses `yield` to stream back responses. +- Initialize `message` first to gather the characters and then `yield` it; + without it, only one letter would be displayed at a time. +""" + + +from asyncio import sleep + +import panel as pn + +pn.extension() + + +async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): + await sleep(1) + message = "" + for char in "Echoing User: " + contents: + await sleep(0.05) + message += char + yield message + + +chat_interface = pn.chat.ChatInterface(callback=callback) +chat_interface.send( + "Enter a message below and receive an echo!", + user="System", + respond=False, +) +chat_interface.servable() +``` +
+ +Live Apps: Pyodide + +## Custom Input Widgets + +Demonstrates how to use the `ChatInterface` and custom widgets, +like `ChatAreaInput` and `FileInput`, to create a chatbot that counts +the number of lines in a message or file. + +Highlights: + +- The `ChatAreaInput` and `FileInput` widgets are used to create a custom +chatbot that counts the number of lines in a message or file. +- The `callback` function is used to count the number of lines in the message +or file and return the result to the User. + + + + + +
+ +Source code for custom_input_widgets.py + +```python +""" +Demonstrates how to use the `ChatInterface` and custom widgets, +like `ChatAreaInput` and `FileInput`, to create a chatbot that counts +the number of lines in a message or file. + +Highlights: + +- The `ChatAreaInput` and `FileInput` widgets are used to create a custom + chatbot that counts the number of lines in a message or file. +- The `callback` function is used to count the number of lines in the message + or file and return the result to the User. +""" + +import panel as pn + +pn.extension() + + +def callback(contents: str, user: str, instance: pn.chat.ChatInterface): + lines = contents.strip().count("\n") + message = f"This snippet has {lines + 1} lines." + return message + + +chat_input = pn.chat.ChatAreaInput(placeholder="Send a message") +file_input = pn.widgets.FileInput(accept=".py") +chat_interface = pn.chat.ChatInterface( + callback=callback, widgets=[chat_input, file_input] +) +chat_interface.send( + "Enter a message in the ChatAreaInput below to count how many lines there is, " + "or upload a Python file to count the number of lines in the file.", + user="System", + respond=False, +) +chat_interface.servable() +``` +
+ +Live Apps: Pyodide + +## Delayed Placeholder + +Demonstrates how to delay the display of the placeholder. + +Highlights: + +- The `placeholder_threshold` parameter is used to delay the display of the placeholder. +If the response time is less than the threshold, the placeholder will not be displayed. +- The `placeholder_text` parameter is used to customize the placeholder text. + + + + + +
+ +Source code for delayed_placeholder.py + +```python +""" +Demonstrates how to delay the display of the placeholder. + +Highlights: + +- The `placeholder_threshold` parameter is used to delay the display of the placeholder. + If the response time is less than the threshold, the placeholder will not be displayed. +- The `placeholder_text` parameter is used to customize the placeholder text. +""" + +from asyncio import sleep + +import panel as pn + +pn.extension() + + +async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): + try: + seconds = float(contents) + if 0 < seconds < 10: + await sleep(seconds) + return f"Slept {contents} seconds!" + else: + return "Please enter a number between 1 and 9!" + except ValueError: + return "Please enter a number!" + + +chat_interface = pn.chat.ChatInterface( + callback=callback, + placeholder_threshold=2, + placeholder_text="Waiting for reply...", +) +chat_interface.send( + "Send a number to make the system sleep between 1 and 9 seconds!", + user="System", + respond=False, +) +chat_interface.servable() +``` +
+ +Live Apps: Pyodide + +## Chained Response + +Demonstrates how to chain responses from a single message in the callback. + +Highlight: + +- The `respond` parameter in the `send` method is used to chain responses. +- It's also possible to use `respond` as a method to chain responses. + + + + + +
+ +Source code for chained_response.py + +```python +""" +Demonstrates how to chain responses from a single message in the callback. + +Highlight: + +- The `respond` parameter in the `send` method is used to chain responses. +- It's also possible to use `respond` as a method to chain responses. +""" + +from asyncio import sleep + +import panel as pn + +pn.extension() + +PERSON_1 = "Happy User" +PERSON_2 = "Excited User" +PERSON_3 = "Passionate User" + + +async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): + await sleep(2) + if user == "User": + instance.send( + f"Hey, {PERSON_2}! Did you hear the user?", + user=PERSON_1, + avatar="😊", + respond=True, # This is the default, but it's here for clarity + ) + elif user == PERSON_1: + user_message = instance.objects[-2] + user_contents = user_message.object + yield pn.chat.ChatMessage( + f'Yeah, they said "{user_contents}"! Did you also hear {PERSON_3}?', + user=PERSON_2, + avatar="😄", + ) + instance.respond() + elif user == PERSON_2: + instance.send( + f"Yup, I heard!", + user=PERSON_3, + avatar="😆", + respond=False, + ) + + +chat_interface = pn.chat.ChatInterface( + help_text="Send a message to start the conversation!", callback=callback +) +chat_interface.servable() +``` +
+ +Live Apps: Pyodide + +## Control Callback Response + +Demonstrates how to precisely control the callback response. + +Highlights: + +- Use a placeholder text to display a message while waiting for the response. +- Use a placeholder threshold to control when the placeholder text is displayed. +- Use send instead of stream/yield/return to keep the placeholder text while still sending a message, ensuring respond=False to avoid a recursive loop. +- Use yield to continuously update the response message. +- Use pn.chat.ChatMessage or dict to send a message with a custom user and avatar. + + + + + +
+ +Source code for control_callback_response.py + +```python +""" +Demonstrates how to precisely control the callback response. + +Highlights: + +- Use a placeholder text to display a message while waiting for the response. +- Use a placeholder threshold to control when the placeholder text is displayed. +- Use send instead of stream/yield/return to keep the placeholder text while still sending a message, ensuring respond=False to avoid a recursive loop. +- Use yield to continuously update the response message. +- Use pn.chat.ChatMessage or dict to send a message with a custom user and avatar. +""" + +from asyncio import sleep +from random import choice + +import panel as pn + +pn.extension() + + +async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): + await sleep(0.5) + # use send instead of stream/yield/return to keep the placeholder text + # while still sending a message; ensure respond=False to avoid a recursive loop + instance.send( + "Let me flip the coin for you...", user="Game Master", avatar="🎲", respond=False + ) + await sleep(1) + + characters = "/|\\_" + index = 0 + for _ in range(0, 28): + index = (index + 1) % len(characters) + # use yield to continuously update the response message + # use pn.chat.ChatMessage to send a message with a custom user and avatar + yield pn.chat.ChatMessage("\r" + characters[index], user="Coin", avatar="🪙") + await sleep(0.005) + + result = choice(["heads", "tails"]) + if result in contents.lower(): + # equivalently, use a dict instead of a pn.chat.ChatMessage + yield {"object": f"Woohoo, {result}! You win!", "user": "Coin", "avatar": "🎲"} + else: + yield {"object": f"Aw, got {result}. 
Try again!", "user": "Coin", "avatar": "🎲"} + + +chat_interface = pn.chat.ChatInterface( + widgets=[ + pn.widgets.RadioButtonGroup( + options=["Heads!", "Tails!"], button_type="primary", button_style="outline" + ) + ], + callback=callback, + help_text="Select heads or tails, then click send!", + placeholder_text="Waiting for the result...", + placeholder_threshold=0.1, +) +chat_interface.servable() +``` +
+ +Live Apps: Pyodide + +## Styled Slim Interface + +Demonstrates how to create a slim `ChatInterface` that fits in the sidebar. + +Highlights: + +- The `ChatInterface` is placed in the sidebar. +- Set `show_*` parameters to `False` to hide the respective buttons. +- Use `message_params` to customize the appearance of each chat messages. + + + + + +
+ +Source code for styled_slim_interface.py + +```python +""" +Demonstrates how to create a slim `ChatInterface` that fits in the sidebar. + +Highlights: + +- The `ChatInterface` is placed in the sidebar. +- Set `show_*` parameters to `False` to hide the respective buttons. +- Use `message_params` to customize the appearance of each chat messages. +""" +import panel as pn + +pn.extension() + + +async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): + message = f"Echoing {user}: {contents}" + return message + + +chat_interface = pn.chat.ChatInterface( + callback=callback, + show_send=False, + show_rerun=False, + show_undo=False, + show_clear=False, + show_avatar=False, + show_timestamp=False, + show_button_name=False, + show_reaction_icons=False, + sizing_mode="stretch_width", + height=700, + message_params={ + "stylesheets": [ + """ + .message { + font-size: 1em; + } + .name { + font-size: 0.9em; + } + .timestamp { + font-size: 0.9em; + } + """ + ] + }, +) + +main = """ +We've put a *slim* `ChatInterface` in the sidebar. In the main area you +could add the object you are chatting about +""" + +pn.template.FastListTemplate( + main=[main], + sidebar=[chat_interface], + sidebar_width=500, +).servable() +``` +
+ +Live Apps: Pyodide diff --git a/docs/examples/langchain/langchain_chat_pandas_df.py b/docs/examples/applicable_recipes/langchain_chat_with_pandas.py similarity index 74% rename from docs/examples/langchain/langchain_chat_pandas_df.py rename to docs/examples/applicable_recipes/langchain_chat_with_pandas.py index 041374a..4512684 100644 --- a/docs/examples/langchain/langchain_chat_pandas_df.py +++ b/docs/examples/applicable_recipes/langchain_chat_with_pandas.py @@ -3,6 +3,7 @@ chatbot to talk to your Pandas DataFrame. This is heavily inspired by the [LangChain `chat_pandas_df` Reference Example](https://github.com/langchain-ai/streamlit-agent/blob/main/streamlit_agent/chat_pandas_df.py). """ + from __future__ import annotations from pathlib import Path @@ -16,8 +17,6 @@ from langchain.chat_models import ChatOpenAI from langchain_experimental.agents.agent_toolkits import create_pandas_dataframe_agent -from panel_chat_examples import EnvironmentWidgetBase - pn.extension("perspective") PENGUINS_URL = ( @@ -38,12 +37,6 @@ """ -class Environment(EnvironmentWidgetBase): - """Will be asking the user for the API Key if not set as environment variable""" - - OPENAI_API_KEY = param.String() - - class AgentConfig(param.Parameterized): """Configuration used for the Pandas Agent""" @@ -63,29 +56,18 @@ class AppState(param.Parameterized): pandas_df_agent = param.Parameter(constant=True) config: AgentConfig = param.ClassSelector(class_=AgentConfig) - environ: Environment = param.ClassSelector(class_=Environment, constant=True) - def __init__( - self, config: AgentConfig | None = None, environ: Environment | None = None - ): + def __init__(self, config: AgentConfig | None = None): if not config: config = AgentConfig() - if not environ: - environ = Environment() - super().__init__(config=config, environ=environ) - @param.depends("environ.OPENAI_API_KEY", on_init=True, watch=True) - def _reset_llm(self): + super().__init__(config=config) with param.edit_constant(self): - if 
self.environ.OPENAI_API_KEY: - self.llm = ChatOpenAI( - temperature=0, - model="gpt-3.5-turbo-0613", - api_key=self.environ.OPENAI_API_KEY, - streaming=True, - ) - else: - self.llm = None + self.llm = ChatOpenAI( + temperature=0, + model="gpt-3.5-turbo-0613", + streaming=True, + ) @param.depends("llm", "data", on_init=True, watch=True) def _reset_pandas_df_agent(self): @@ -104,13 +86,7 @@ def _reset_pandas_df_agent(self): @property def error_message(self): if not self.llm and self.data is None: - return dedent( - """\ - Please provide your `OPENAI_API_KEY`, **upload a `.csv` file** - and click the **send** button.""" - ) - if not self.llm: - return "Please provide your `OPENAI_API_KEY`." + return "Please **upload a `.csv` file** and click the **send** button." if self.data is None: return "Please **upload a `.csv` file** and click the **send** button." return "" @@ -134,10 +110,8 @@ async def callback(self, contents, user, instance): self.data = contents instance.active = 1 message = self.config._get_agent_message( - dedent( - """You can ask me anything about the data. For example 'how many - species are there?'""" - ) + "You can ask me anything about the data. 
For example " + "'how many species are there?'" ) return message @@ -163,8 +137,8 @@ async def callback(self, contents, user, instance): chat_interface = pn.chat.ChatInterface( widgets=[ - pn.widgets.FileInput(name="Upload"), - pn.widgets.TextInput(name="Message", placeholder="Send a message"), + pn.widgets.FileInput(name="Upload", accept=".csv"), + pn.chat.ChatAreaInput(name="Message", placeholder="Send a message"), ], renderers=pn.pane.Perspective, callback=state.callback, @@ -200,7 +174,4 @@ async def callback(self, contents, user, instance): ], ) -if state.environ.variables_not_set: - layout.sidebar.append(state.environ) - layout.servable() diff --git a/docs/examples/applicable_recipes/langchain_chat_with_pdf.py b/docs/examples/applicable_recipes/langchain_chat_with_pdf.py new file mode 100644 index 0000000..b0fbabf --- /dev/null +++ b/docs/examples/applicable_recipes/langchain_chat_with_pdf.py @@ -0,0 +1,100 @@ +""" +Demonstrates how to use the `ChatInterface` to chat about a PDF using +OpenAI, [LangChain](https://python.langchain.com/docs/get_started/introduction) and +[Chroma](https://docs.trychroma.com/). 
+""" + +import os +import tempfile + +import panel as pn +from langchain.chains import RetrievalQA +from langchain.document_loaders import PyPDFLoader +from langchain.embeddings import OpenAIEmbeddings +from langchain.text_splitter import CharacterTextSplitter +from langchain.vectorstores import Chroma +from langchain_community.chat_models import ChatOpenAI + +pn.extension() + + +@pn.cache +def initialize_chain(pdf, k, chain): + # load document + with tempfile.NamedTemporaryFile("wb", delete=False) as f: + f.write(pdf) + + file_name = f.name + loader = PyPDFLoader(file_name) + documents = loader.load() + # split the documents into chunks + text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0) + texts = text_splitter.split_documents(documents) + # select which embeddings we want to use + embeddings = OpenAIEmbeddings() + # create the vectorestore to use as the index + db = Chroma.from_documents(texts, embeddings) + # expose this index in a retriever interface + retriever = db.as_retriever(search_type="similarity", search_kwargs={"k": k}) + # create a chain to answer questions + qa = RetrievalQA.from_chain_type( + llm=ChatOpenAI(), + chain_type=chain, + retriever=retriever, + return_source_documents=True, + verbose=True, + ) + return qa + + +def respond(contents, user, chat_interface): + chat_input.placeholder = "Ask questions here!" 
+ if chat_interface.active == 0: + chat_interface.active = 1 + yield {"user": "OpenAI", "value": "Let's chat about the PDF!"} + + contents.seek(0) + pn.state.cache["pdf"] = contents.read() + return + + qa = initialize_chain(pn.state.cache["pdf"], k_slider.value, chain_select.value) + if key_input.value: + os.environ["OPENAI_API_KEY"] = key_input.value + + response = qa({"query": contents}) + answers = pn.Accordion(("Response", response["result"])) + for doc in response["source_documents"][::-1]: + answers.append((f"Snippet from page {doc.metadata['page']}", doc.page_content)) + answers.active = [0, 1] + yield {"user": "OpenAI", "value": answers} + + +# sidebar widgets +key_input = pn.widgets.PasswordInput( + name="OpenAI Key", + placeholder="sk-...", +) +k_slider = pn.widgets.IntSlider( + name="Number of Relevant Chunks", start=1, end=5, step=1, value=2 +) +chain_select = pn.widgets.RadioButtonGroup( + name="Chain Type", options=["stuff", "map_reduce", "refine", "map_rerank"] +) + +sidebar = pn.Column(key_input, k_slider, chain_select) + +# main widgets +pdf_input = pn.widgets.FileInput(accept=".pdf", value="", height=50) +chat_input = pn.chat.ChatAreaInput(placeholder="First, upload a PDF!") +chat_interface = pn.chat.ChatInterface( + help_text="Please first upload a PDF and click send!", + callback=respond, + sizing_mode="stretch_width", + widgets=[pdf_input, chat_input], + callback_exception="verbose", +) +chat_interface.active = 0 + +# layout +template = pn.template.BootstrapTemplate(sidebar=[sidebar], main=[chat_interface]) +template.servable() diff --git a/docs/examples/openai/openai_chat_with_hvplot.py b/docs/examples/applicable_recipes/openai_chat_with_hvplot.py similarity index 91% rename from docs/examples/openai/openai_chat_with_hvplot.py rename to docs/examples/applicable_recipes/openai_chat_with_hvplot.py index 5378c3f..e8f4631 100644 --- a/docs/examples/openai/openai_chat_with_hvplot.py +++ b/docs/examples/applicable_recipes/openai_chat_with_hvplot.py 
@@ -6,10 +6,8 @@ from pathlib import Path import hvplot.pandas # noqa -import matplotlib.pyplot as plt import pandas as pd import panel as pn -import plotly.io as pio from openai import AsyncOpenAI ROOT = Path(__file__).parent @@ -41,10 +39,8 @@ @pn.cache def _read_data(): - return ( - pd.read_csv(ROOT / "ABC.csv", parse_dates=["date"]) - .sort_values(by="date", ascending=False) - .head(100) + return pd.read_csv( + "https://raw.githubusercontent.com/kirenz/datasets/master/gapminder.csv" ) @@ -104,10 +100,7 @@ def _read_tool(name: str) -> dict: {DATA.dtypes} ```""" -pn.extension( - "plotly", - raw_css=[CSS_TO_BE_UPSTREAMED_TO_PANEL], -) +pn.extension(raw_css=[CSS_TO_BE_UPSTREAMED_TO_PANEL]) tools_pane = pn.pane.JSON( object=TOOLS, depth=6, theme=JSON_THEME, name="Tools", sizing_mode="stretch_both" @@ -118,6 +111,7 @@ def _read_tool(name: str) -> dict: pn.Column(name="Arguments"), sizing_mode="stretch_both", styles={"border-left": "2px solid var(--neutral-fill-active)"}, + dynamic=True, ) @@ -167,15 +161,6 @@ def _clean_tool_kwargs(kwargs): kwargs["hvplot"]["responsive"] = True -def _set_theme(): - if THEME == "dark": - pio.templates.default = "plotly_dark" - plt.style.use(["default", "dark_background"]) - else: - pio.templates.default = "plotly" - plt.style.use(["default", "seaborn-v0_8"]) - - client = AsyncOpenAI() tool_kwargs = {"hvplot": {}, "renderer": {}} @@ -198,15 +183,9 @@ async def callback( _clean_tool_kwargs(tool_kwargs) code = _to_code(tool_kwargs["hvplot"]) - response = f""" -Try running - -```python -{code} -```""" + response = f"Try running\n```python\n{code}\n```\n" chat_interface.send(response, user="Assistant", respond=False) plot = DATA.hvplot(**tool_kwargs["hvplot"]) - _set_theme() pane = pn.pane.HoloViews( object=plot, sizing_mode="stretch_both", name="Plot", **tool_kwargs["renderer"] ) diff --git a/docs/examples/openai/openai_two_bots.py b/docs/examples/applicable_recipes/openai_two_bots.py similarity index 71% rename from 
docs/examples/openai/openai_two_bots.py rename to docs/examples/applicable_recipes/openai_two_bots.py index f839194..552afe7 100644 --- a/docs/examples/openai/openai_two_bots.py +++ b/docs/examples/applicable_recipes/openai_two_bots.py @@ -1,6 +1,11 @@ """ Demonstrates how to use the `ChatInterface` to create two bots that chat with each other. + +Highlights: + +- The user decides the callback user and avatar for the response. +- A system message is used to control the conversation flow. """ import panel as pn @@ -21,14 +26,22 @@ async def callback( callback_user = "Happy Bot" callback_avatar = "😃" - prompt = f"Think profoundly about {contents}, then ask a question." + if len(instance.objects) % 6 == 0: # stop at every 6 messages + instance.send( + "That's it for now! Thanks for chatting!", user="System", respond=False + ) + return + + prompt = f"Reply profoundly about '{contents}', then follow up with a question." + messages = [{"role": "user", "content": prompt}] response = await aclient.chat.completions.create( model="gpt-3.5-turbo", - messages=[{"role": "user", "content": prompt}], + messages=messages, stream=True, max_tokens=250, temperature=0.1, ) + message = "" async for chunk in response: part = chunk.choices[0].delta.content @@ -36,19 +49,12 @@ async def callback( message += part yield {"user": callback_user, "avatar": callback_avatar, "object": message} - if len(instance.objects) % 6 == 0: # stop at every 6 messages - instance.send( - "That's it for now! Thanks for chatting!", user="System", respond=False - ) - return instance.respond() aclient = AsyncOpenAI() -chat_interface = pn.chat.ChatInterface(callback=callback) -chat_interface.send( - "Enter a topic for the bots to discuss! Beware the token usage!", - user="System", - respond=False, +chat_interface = pn.chat.ChatInterface( + callback=callback, + help_text="Enter a topic for the bots to discuss! 
Beware the token usage!", ) chat_interface.servable() diff --git a/docs/examples/applicable_recipes/penguins.csv b/docs/examples/applicable_recipes/penguins.csv new file mode 100644 index 0000000..51fd0fe --- /dev/null +++ b/docs/examples/applicable_recipes/penguins.csv @@ -0,0 +1,345 @@ +species,island,bill_length_mm,bill_depth_mm,flipper_length_mm,body_mass_g,sex +Adelie,Torgersen,39.1,18.7,181,3750,MALE +Adelie,Torgersen,39.5,17.4,186,3800,FEMALE +Adelie,Torgersen,40.3,18,195,3250,FEMALE +Adelie,Torgersen,,,,, +Adelie,Torgersen,36.7,19.3,193,3450,FEMALE +Adelie,Torgersen,39.3,20.6,190,3650,MALE +Adelie,Torgersen,38.9,17.8,181,3625,FEMALE +Adelie,Torgersen,39.2,19.6,195,4675,MALE +Adelie,Torgersen,34.1,18.1,193,3475, +Adelie,Torgersen,42,20.2,190,4250, +Adelie,Torgersen,37.8,17.1,186,3300, +Adelie,Torgersen,37.8,17.3,180,3700, +Adelie,Torgersen,41.1,17.6,182,3200,FEMALE +Adelie,Torgersen,38.6,21.2,191,3800,MALE +Adelie,Torgersen,34.6,21.1,198,4400,MALE +Adelie,Torgersen,36.6,17.8,185,3700,FEMALE +Adelie,Torgersen,38.7,19,195,3450,FEMALE +Adelie,Torgersen,42.5,20.7,197,4500,MALE +Adelie,Torgersen,34.4,18.4,184,3325,FEMALE +Adelie,Torgersen,46,21.5,194,4200,MALE +Adelie,Biscoe,37.8,18.3,174,3400,FEMALE +Adelie,Biscoe,37.7,18.7,180,3600,MALE +Adelie,Biscoe,35.9,19.2,189,3800,FEMALE +Adelie,Biscoe,38.2,18.1,185,3950,MALE +Adelie,Biscoe,38.8,17.2,180,3800,MALE +Adelie,Biscoe,35.3,18.9,187,3800,FEMALE +Adelie,Biscoe,40.6,18.6,183,3550,MALE +Adelie,Biscoe,40.5,17.9,187,3200,FEMALE +Adelie,Biscoe,37.9,18.6,172,3150,FEMALE +Adelie,Biscoe,40.5,18.9,180,3950,MALE +Adelie,Dream,39.5,16.7,178,3250,FEMALE +Adelie,Dream,37.2,18.1,178,3900,MALE +Adelie,Dream,39.5,17.8,188,3300,FEMALE +Adelie,Dream,40.9,18.9,184,3900,MALE +Adelie,Dream,36.4,17,195,3325,FEMALE +Adelie,Dream,39.2,21.1,196,4150,MALE +Adelie,Dream,38.8,20,190,3950,MALE +Adelie,Dream,42.2,18.5,180,3550,FEMALE +Adelie,Dream,37.6,19.3,181,3300,FEMALE +Adelie,Dream,39.8,19.1,184,4650,MALE 
+Adelie,Dream,36.5,18,182,3150,FEMALE +Adelie,Dream,40.8,18.4,195,3900,MALE +Adelie,Dream,36,18.5,186,3100,FEMALE +Adelie,Dream,44.1,19.7,196,4400,MALE +Adelie,Dream,37,16.9,185,3000,FEMALE +Adelie,Dream,39.6,18.8,190,4600,MALE +Adelie,Dream,41.1,19,182,3425,MALE +Adelie,Dream,37.5,18.9,179,2975, +Adelie,Dream,36,17.9,190,3450,FEMALE +Adelie,Dream,42.3,21.2,191,4150,MALE +Adelie,Biscoe,39.6,17.7,186,3500,FEMALE +Adelie,Biscoe,40.1,18.9,188,4300,MALE +Adelie,Biscoe,35,17.9,190,3450,FEMALE +Adelie,Biscoe,42,19.5,200,4050,MALE +Adelie,Biscoe,34.5,18.1,187,2900,FEMALE +Adelie,Biscoe,41.4,18.6,191,3700,MALE +Adelie,Biscoe,39,17.5,186,3550,FEMALE +Adelie,Biscoe,40.6,18.8,193,3800,MALE +Adelie,Biscoe,36.5,16.6,181,2850,FEMALE +Adelie,Biscoe,37.6,19.1,194,3750,MALE +Adelie,Biscoe,35.7,16.9,185,3150,FEMALE +Adelie,Biscoe,41.3,21.1,195,4400,MALE +Adelie,Biscoe,37.6,17,185,3600,FEMALE +Adelie,Biscoe,41.1,18.2,192,4050,MALE +Adelie,Biscoe,36.4,17.1,184,2850,FEMALE +Adelie,Biscoe,41.6,18,192,3950,MALE +Adelie,Biscoe,35.5,16.2,195,3350,FEMALE +Adelie,Biscoe,41.1,19.1,188,4100,MALE +Adelie,Torgersen,35.9,16.6,190,3050,FEMALE +Adelie,Torgersen,41.8,19.4,198,4450,MALE +Adelie,Torgersen,33.5,19,190,3600,FEMALE +Adelie,Torgersen,39.7,18.4,190,3900,MALE +Adelie,Torgersen,39.6,17.2,196,3550,FEMALE +Adelie,Torgersen,45.8,18.9,197,4150,MALE +Adelie,Torgersen,35.5,17.5,190,3700,FEMALE +Adelie,Torgersen,42.8,18.5,195,4250,MALE +Adelie,Torgersen,40.9,16.8,191,3700,FEMALE +Adelie,Torgersen,37.2,19.4,184,3900,MALE +Adelie,Torgersen,36.2,16.1,187,3550,FEMALE +Adelie,Torgersen,42.1,19.1,195,4000,MALE +Adelie,Torgersen,34.6,17.2,189,3200,FEMALE +Adelie,Torgersen,42.9,17.6,196,4700,MALE +Adelie,Torgersen,36.7,18.8,187,3800,FEMALE +Adelie,Torgersen,35.1,19.4,193,4200,MALE +Adelie,Dream,37.3,17.8,191,3350,FEMALE +Adelie,Dream,41.3,20.3,194,3550,MALE +Adelie,Dream,36.3,19.5,190,3800,MALE +Adelie,Dream,36.9,18.6,189,3500,FEMALE +Adelie,Dream,38.3,19.2,189,3950,MALE 
+Adelie,Dream,38.9,18.8,190,3600,FEMALE +Adelie,Dream,35.7,18,202,3550,FEMALE +Adelie,Dream,41.1,18.1,205,4300,MALE +Adelie,Dream,34,17.1,185,3400,FEMALE +Adelie,Dream,39.6,18.1,186,4450,MALE +Adelie,Dream,36.2,17.3,187,3300,FEMALE +Adelie,Dream,40.8,18.9,208,4300,MALE +Adelie,Dream,38.1,18.6,190,3700,FEMALE +Adelie,Dream,40.3,18.5,196,4350,MALE +Adelie,Dream,33.1,16.1,178,2900,FEMALE +Adelie,Dream,43.2,18.5,192,4100,MALE +Adelie,Biscoe,35,17.9,192,3725,FEMALE +Adelie,Biscoe,41,20,203,4725,MALE +Adelie,Biscoe,37.7,16,183,3075,FEMALE +Adelie,Biscoe,37.8,20,190,4250,MALE +Adelie,Biscoe,37.9,18.6,193,2925,FEMALE +Adelie,Biscoe,39.7,18.9,184,3550,MALE +Adelie,Biscoe,38.6,17.2,199,3750,FEMALE +Adelie,Biscoe,38.2,20,190,3900,MALE +Adelie,Biscoe,38.1,17,181,3175,FEMALE +Adelie,Biscoe,43.2,19,197,4775,MALE +Adelie,Biscoe,38.1,16.5,198,3825,FEMALE +Adelie,Biscoe,45.6,20.3,191,4600,MALE +Adelie,Biscoe,39.7,17.7,193,3200,FEMALE +Adelie,Biscoe,42.2,19.5,197,4275,MALE +Adelie,Biscoe,39.6,20.7,191,3900,FEMALE +Adelie,Biscoe,42.7,18.3,196,4075,MALE +Adelie,Torgersen,38.6,17,188,2900,FEMALE +Adelie,Torgersen,37.3,20.5,199,3775,MALE +Adelie,Torgersen,35.7,17,189,3350,FEMALE +Adelie,Torgersen,41.1,18.6,189,3325,MALE +Adelie,Torgersen,36.2,17.2,187,3150,FEMALE +Adelie,Torgersen,37.7,19.8,198,3500,MALE +Adelie,Torgersen,40.2,17,176,3450,FEMALE +Adelie,Torgersen,41.4,18.5,202,3875,MALE +Adelie,Torgersen,35.2,15.9,186,3050,FEMALE +Adelie,Torgersen,40.6,19,199,4000,MALE +Adelie,Torgersen,38.8,17.6,191,3275,FEMALE +Adelie,Torgersen,41.5,18.3,195,4300,MALE +Adelie,Torgersen,39,17.1,191,3050,FEMALE +Adelie,Torgersen,44.1,18,210,4000,MALE +Adelie,Torgersen,38.5,17.9,190,3325,FEMALE +Adelie,Torgersen,43.1,19.2,197,3500,MALE +Adelie,Dream,36.8,18.5,193,3500,FEMALE +Adelie,Dream,37.5,18.5,199,4475,MALE +Adelie,Dream,38.1,17.6,187,3425,FEMALE +Adelie,Dream,41.1,17.5,190,3900,MALE +Adelie,Dream,35.6,17.5,191,3175,FEMALE +Adelie,Dream,40.2,20.1,200,3975,MALE +Adelie,Dream,37,16.5,185,3400,FEMALE 
+Adelie,Dream,39.7,17.9,193,4250,MALE +Adelie,Dream,40.2,17.1,193,3400,FEMALE +Adelie,Dream,40.6,17.2,187,3475,MALE +Adelie,Dream,32.1,15.5,188,3050,FEMALE +Adelie,Dream,40.7,17,190,3725,MALE +Adelie,Dream,37.3,16.8,192,3000,FEMALE +Adelie,Dream,39,18.7,185,3650,MALE +Adelie,Dream,39.2,18.6,190,4250,MALE +Adelie,Dream,36.6,18.4,184,3475,FEMALE +Adelie,Dream,36,17.8,195,3450,FEMALE +Adelie,Dream,37.8,18.1,193,3750,MALE +Adelie,Dream,36,17.1,187,3700,FEMALE +Adelie,Dream,41.5,18.5,201,4000,MALE +Chinstrap,Dream,46.5,17.9,192,3500,FEMALE +Chinstrap,Dream,50,19.5,196,3900,MALE +Chinstrap,Dream,51.3,19.2,193,3650,MALE +Chinstrap,Dream,45.4,18.7,188,3525,FEMALE +Chinstrap,Dream,52.7,19.8,197,3725,MALE +Chinstrap,Dream,45.2,17.8,198,3950,FEMALE +Chinstrap,Dream,46.1,18.2,178,3250,FEMALE +Chinstrap,Dream,51.3,18.2,197,3750,MALE +Chinstrap,Dream,46,18.9,195,4150,FEMALE +Chinstrap,Dream,51.3,19.9,198,3700,MALE +Chinstrap,Dream,46.6,17.8,193,3800,FEMALE +Chinstrap,Dream,51.7,20.3,194,3775,MALE +Chinstrap,Dream,47,17.3,185,3700,FEMALE +Chinstrap,Dream,52,18.1,201,4050,MALE +Chinstrap,Dream,45.9,17.1,190,3575,FEMALE +Chinstrap,Dream,50.5,19.6,201,4050,MALE +Chinstrap,Dream,50.3,20,197,3300,MALE +Chinstrap,Dream,58,17.8,181,3700,FEMALE +Chinstrap,Dream,46.4,18.6,190,3450,FEMALE +Chinstrap,Dream,49.2,18.2,195,4400,MALE +Chinstrap,Dream,42.4,17.3,181,3600,FEMALE +Chinstrap,Dream,48.5,17.5,191,3400,MALE +Chinstrap,Dream,43.2,16.6,187,2900,FEMALE +Chinstrap,Dream,50.6,19.4,193,3800,MALE +Chinstrap,Dream,46.7,17.9,195,3300,FEMALE +Chinstrap,Dream,52,19,197,4150,MALE +Chinstrap,Dream,50.5,18.4,200,3400,FEMALE +Chinstrap,Dream,49.5,19,200,3800,MALE +Chinstrap,Dream,46.4,17.8,191,3700,FEMALE +Chinstrap,Dream,52.8,20,205,4550,MALE +Chinstrap,Dream,40.9,16.6,187,3200,FEMALE +Chinstrap,Dream,54.2,20.8,201,4300,MALE +Chinstrap,Dream,42.5,16.7,187,3350,FEMALE +Chinstrap,Dream,51,18.8,203,4100,MALE +Chinstrap,Dream,49.7,18.6,195,3600,MALE +Chinstrap,Dream,47.5,16.8,199,3900,FEMALE 
+Chinstrap,Dream,47.6,18.3,195,3850,FEMALE +Chinstrap,Dream,52,20.7,210,4800,MALE +Chinstrap,Dream,46.9,16.6,192,2700,FEMALE +Chinstrap,Dream,53.5,19.9,205,4500,MALE +Chinstrap,Dream,49,19.5,210,3950,MALE +Chinstrap,Dream,46.2,17.5,187,3650,FEMALE +Chinstrap,Dream,50.9,19.1,196,3550,MALE +Chinstrap,Dream,45.5,17,196,3500,FEMALE +Chinstrap,Dream,50.9,17.9,196,3675,FEMALE +Chinstrap,Dream,50.8,18.5,201,4450,MALE +Chinstrap,Dream,50.1,17.9,190,3400,FEMALE +Chinstrap,Dream,49,19.6,212,4300,MALE +Chinstrap,Dream,51.5,18.7,187,3250,MALE +Chinstrap,Dream,49.8,17.3,198,3675,FEMALE +Chinstrap,Dream,48.1,16.4,199,3325,FEMALE +Chinstrap,Dream,51.4,19,201,3950,MALE +Chinstrap,Dream,45.7,17.3,193,3600,FEMALE +Chinstrap,Dream,50.7,19.7,203,4050,MALE +Chinstrap,Dream,42.5,17.3,187,3350,FEMALE +Chinstrap,Dream,52.2,18.8,197,3450,MALE +Chinstrap,Dream,45.2,16.6,191,3250,FEMALE +Chinstrap,Dream,49.3,19.9,203,4050,MALE +Chinstrap,Dream,50.2,18.8,202,3800,MALE +Chinstrap,Dream,45.6,19.4,194,3525,FEMALE +Chinstrap,Dream,51.9,19.5,206,3950,MALE +Chinstrap,Dream,46.8,16.5,189,3650,FEMALE +Chinstrap,Dream,45.7,17,195,3650,FEMALE +Chinstrap,Dream,55.8,19.8,207,4000,MALE +Chinstrap,Dream,43.5,18.1,202,3400,FEMALE +Chinstrap,Dream,49.6,18.2,193,3775,MALE +Chinstrap,Dream,50.8,19,210,4100,MALE +Chinstrap,Dream,50.2,18.7,198,3775,FEMALE +Gentoo,Biscoe,46.1,13.2,211,4500,FEMALE +Gentoo,Biscoe,50,16.3,230,5700,MALE +Gentoo,Biscoe,48.7,14.1,210,4450,FEMALE +Gentoo,Biscoe,50,15.2,218,5700,MALE +Gentoo,Biscoe,47.6,14.5,215,5400,MALE +Gentoo,Biscoe,46.5,13.5,210,4550,FEMALE +Gentoo,Biscoe,45.4,14.6,211,4800,FEMALE +Gentoo,Biscoe,46.7,15.3,219,5200,MALE +Gentoo,Biscoe,43.3,13.4,209,4400,FEMALE +Gentoo,Biscoe,46.8,15.4,215,5150,MALE +Gentoo,Biscoe,40.9,13.7,214,4650,FEMALE +Gentoo,Biscoe,49,16.1,216,5550,MALE +Gentoo,Biscoe,45.5,13.7,214,4650,FEMALE +Gentoo,Biscoe,48.4,14.6,213,5850,MALE +Gentoo,Biscoe,45.8,14.6,210,4200,FEMALE +Gentoo,Biscoe,49.3,15.7,217,5850,MALE 
+Gentoo,Biscoe,42,13.5,210,4150,FEMALE +Gentoo,Biscoe,49.2,15.2,221,6300,MALE +Gentoo,Biscoe,46.2,14.5,209,4800,FEMALE +Gentoo,Biscoe,48.7,15.1,222,5350,MALE +Gentoo,Biscoe,50.2,14.3,218,5700,MALE +Gentoo,Biscoe,45.1,14.5,215,5000,FEMALE +Gentoo,Biscoe,46.5,14.5,213,4400,FEMALE +Gentoo,Biscoe,46.3,15.8,215,5050,MALE +Gentoo,Biscoe,42.9,13.1,215,5000,FEMALE +Gentoo,Biscoe,46.1,15.1,215,5100,MALE +Gentoo,Biscoe,44.5,14.3,216,4100, +Gentoo,Biscoe,47.8,15,215,5650,MALE +Gentoo,Biscoe,48.2,14.3,210,4600,FEMALE +Gentoo,Biscoe,50,15.3,220,5550,MALE +Gentoo,Biscoe,47.3,15.3,222,5250,MALE +Gentoo,Biscoe,42.8,14.2,209,4700,FEMALE +Gentoo,Biscoe,45.1,14.5,207,5050,FEMALE +Gentoo,Biscoe,59.6,17,230,6050,MALE +Gentoo,Biscoe,49.1,14.8,220,5150,FEMALE +Gentoo,Biscoe,48.4,16.3,220,5400,MALE +Gentoo,Biscoe,42.6,13.7,213,4950,FEMALE +Gentoo,Biscoe,44.4,17.3,219,5250,MALE +Gentoo,Biscoe,44,13.6,208,4350,FEMALE +Gentoo,Biscoe,48.7,15.7,208,5350,MALE +Gentoo,Biscoe,42.7,13.7,208,3950,FEMALE +Gentoo,Biscoe,49.6,16,225,5700,MALE +Gentoo,Biscoe,45.3,13.7,210,4300,FEMALE +Gentoo,Biscoe,49.6,15,216,4750,MALE +Gentoo,Biscoe,50.5,15.9,222,5550,MALE +Gentoo,Biscoe,43.6,13.9,217,4900,FEMALE +Gentoo,Biscoe,45.5,13.9,210,4200,FEMALE +Gentoo,Biscoe,50.5,15.9,225,5400,MALE +Gentoo,Biscoe,44.9,13.3,213,5100,FEMALE +Gentoo,Biscoe,45.2,15.8,215,5300,MALE +Gentoo,Biscoe,46.6,14.2,210,4850,FEMALE +Gentoo,Biscoe,48.5,14.1,220,5300,MALE +Gentoo,Biscoe,45.1,14.4,210,4400,FEMALE +Gentoo,Biscoe,50.1,15,225,5000,MALE +Gentoo,Biscoe,46.5,14.4,217,4900,FEMALE +Gentoo,Biscoe,45,15.4,220,5050,MALE +Gentoo,Biscoe,43.8,13.9,208,4300,FEMALE +Gentoo,Biscoe,45.5,15,220,5000,MALE +Gentoo,Biscoe,43.2,14.5,208,4450,FEMALE +Gentoo,Biscoe,50.4,15.3,224,5550,MALE +Gentoo,Biscoe,45.3,13.8,208,4200,FEMALE +Gentoo,Biscoe,46.2,14.9,221,5300,MALE +Gentoo,Biscoe,45.7,13.9,214,4400,FEMALE +Gentoo,Biscoe,54.3,15.7,231,5650,MALE +Gentoo,Biscoe,45.8,14.2,219,4700,FEMALE +Gentoo,Biscoe,49.8,16.8,230,5700,MALE 
+Gentoo,Biscoe,46.2,14.4,214,4650, +Gentoo,Biscoe,49.5,16.2,229,5800,MALE +Gentoo,Biscoe,43.5,14.2,220,4700,FEMALE +Gentoo,Biscoe,50.7,15,223,5550,MALE +Gentoo,Biscoe,47.7,15,216,4750,FEMALE +Gentoo,Biscoe,46.4,15.6,221,5000,MALE +Gentoo,Biscoe,48.2,15.6,221,5100,MALE +Gentoo,Biscoe,46.5,14.8,217,5200,FEMALE +Gentoo,Biscoe,46.4,15,216,4700,FEMALE +Gentoo,Biscoe,48.6,16,230,5800,MALE +Gentoo,Biscoe,47.5,14.2,209,4600,FEMALE +Gentoo,Biscoe,51.1,16.3,220,6000,MALE +Gentoo,Biscoe,45.2,13.8,215,4750,FEMALE +Gentoo,Biscoe,45.2,16.4,223,5950,MALE +Gentoo,Biscoe,49.1,14.5,212,4625,FEMALE +Gentoo,Biscoe,52.5,15.6,221,5450,MALE +Gentoo,Biscoe,47.4,14.6,212,4725,FEMALE +Gentoo,Biscoe,50,15.9,224,5350,MALE +Gentoo,Biscoe,44.9,13.8,212,4750,FEMALE +Gentoo,Biscoe,50.8,17.3,228,5600,MALE +Gentoo,Biscoe,43.4,14.4,218,4600,FEMALE +Gentoo,Biscoe,51.3,14.2,218,5300,MALE +Gentoo,Biscoe,47.5,14,212,4875,FEMALE +Gentoo,Biscoe,52.1,17,230,5550,MALE +Gentoo,Biscoe,47.5,15,218,4950,FEMALE +Gentoo,Biscoe,52.2,17.1,228,5400,MALE +Gentoo,Biscoe,45.5,14.5,212,4750,FEMALE +Gentoo,Biscoe,49.5,16.1,224,5650,MALE +Gentoo,Biscoe,44.5,14.7,214,4850,FEMALE +Gentoo,Biscoe,50.8,15.7,226,5200,MALE +Gentoo,Biscoe,49.4,15.8,216,4925,MALE +Gentoo,Biscoe,46.9,14.6,222,4875,FEMALE +Gentoo,Biscoe,48.4,14.4,203,4625,FEMALE +Gentoo,Biscoe,51.1,16.5,225,5250,MALE +Gentoo,Biscoe,48.5,15,219,4850,FEMALE +Gentoo,Biscoe,55.9,17,228,5600,MALE +Gentoo,Biscoe,47.2,15.5,215,4975,FEMALE +Gentoo,Biscoe,49.1,15,228,5500,MALE +Gentoo,Biscoe,47.3,13.8,216,4725, +Gentoo,Biscoe,46.8,16.1,215,5500,MALE +Gentoo,Biscoe,41.7,14.7,210,4700,FEMALE +Gentoo,Biscoe,53.4,15.8,219,5500,MALE +Gentoo,Biscoe,43.3,14,208,4575,FEMALE +Gentoo,Biscoe,48.1,15.1,209,5500,MALE +Gentoo,Biscoe,50.5,15.2,216,5000,FEMALE +Gentoo,Biscoe,49.8,15.9,229,5950,MALE +Gentoo,Biscoe,43.5,15.2,213,4650,FEMALE +Gentoo,Biscoe,51.5,16.3,230,5500,MALE +Gentoo,Biscoe,46.2,14.1,217,4375,FEMALE +Gentoo,Biscoe,55.1,16,230,5850,MALE +Gentoo,Biscoe,44.5,15.7,217,4875, 
+Gentoo,Biscoe,48.8,16.2,222,6000,MALE +Gentoo,Biscoe,47.2,13.7,214,4925,FEMALE +Gentoo,Biscoe,,,,, +Gentoo,Biscoe,46.8,14.3,215,4850,FEMALE +Gentoo,Biscoe,50.4,15.7,222,5750,MALE +Gentoo,Biscoe,45.2,14.8,212,5200,FEMALE +Gentoo,Biscoe,49.9,16.1,213,5400,MALE diff --git a/docs/examples/openai/tool_hvplot.json b/docs/examples/applicable_recipes/tool_hvplot.json similarity index 90% rename from docs/examples/openai/tool_hvplot.json rename to docs/examples/applicable_recipes/tool_hvplot.json index a624580..64aea70 100644 --- a/docs/examples/openai/tool_hvplot.json +++ b/docs/examples/applicable_recipes/tool_hvplot.json @@ -8,7 +8,11 @@ "properties": { "cnorm": { "type": "string", - "enum": ["linear", "log", "eq_hist"], + "enum": [ + "linear", + "log", + "eq_hist" + ], "default": "linear", "description": "Color scaling which must be one of 'linear', 'log' or 'eq_hist'" }, @@ -137,7 +141,13 @@ }, "tools": { "type": "array", - "default": ["save", "pan", "wheel_zoom", "box_zoom", "reset"], + "default": [ + "save", + "pan", + "wheel_zoom", + "box_zoom", + "reset" + ], "items": { "type": "string", "enum": [ @@ -175,7 +185,12 @@ }, { "type": "string", - "enum": ["top", "bottom", "left", "right"], + "enum": [ + "top", + "bottom", + "left", + "right" + ], "description": "Whether to place the x-axis at the 'top' or 'bottom'." } ] @@ -189,7 +204,10 @@ }, { "type": "string", - "enum": ["left", "right"], + "enum": [ + "left", + "right" + ], "description": "Whether to place the y-axis at the 'left' or 'right'." } ] @@ -268,6 +286,14 @@ "type": "number", "default": 0.0, "description": "Rotates the axis ticks along the x-axis by the specified number of degrees." 
+ }, + "groupby": { + "type": "string", + "description": "The column to group by" + }, + "by": { + "type": "string", + "description": "The column to overlay by" } } } diff --git a/docs/examples/applicable_recipes/tool_renderer.json b/docs/examples/applicable_recipes/tool_renderer.json new file mode 100644 index 0000000..be08eef --- /dev/null +++ b/docs/examples/applicable_recipes/tool_renderer.json @@ -0,0 +1,21 @@ +{ + "type": "function", + "function": { + "name": "renderer", + "description": "Renders a plot using a named backend like 'bokeh', 'matplotlib'", + "parameters": { + "type": "object", + "properties": { + "backend": { + "type": "string", + "enum": ["bokeh", "matplotlib"], + "default": "bokeh", + "description": "The name of the backend to render the plot with. One of 'bokeh', 'matplotlib'" + } + }, + "required": [ + "backend" + ] + } + } +} diff --git a/docs/examples/basics/basic_chat.py b/docs/examples/basics/basic_chat.py deleted file mode 100644 index bcb4a2a..0000000 --- a/docs/examples/basics/basic_chat.py +++ /dev/null @@ -1,23 +0,0 @@ -""" -Demonstrates how to use the `ChatInterface` and a `callback` function to respond. - -The chatbot Assistant echoes back the message entered by the User. 
-""" - -import panel as pn - -pn.extension() - - -def callback(contents: str, user: str, instance: pn.chat.ChatInterface): - message = f"Echoing {user}: {contents}" - return message - - -chat_interface = pn.chat.ChatInterface(callback=callback) -chat_interface.send( - "Enter a message in the TextInput below and receive an echo!", - user="System", - respond=False, -) -chat_interface.servable() diff --git a/docs/examples/basics/basic_custom_widgets.py b/docs/examples/basics/basic_custom_widgets.py deleted file mode 100644 index b5cb8af..0000000 --- a/docs/examples/basics/basic_custom_widgets.py +++ /dev/null @@ -1,32 +0,0 @@ -""" -Demonstrates how to use the `ChatInterface` and custom widgets, -like `TextAreaInput` and `FileInput`, to create a chatbot that counts -the number of lines in a message or file. -""" - -import panel as pn - -pn.extension() - - -def callback(contents: str, user: str, instance: pn.chat.ChatInterface): - lines = contents.strip().count("\n") - message = f"This snippet has {lines + 1} lines." - return message - - -text_input = pn.widgets.TextInput(placeholder="Send a message") -text_area_input = pn.widgets.TextAreaInput( - auto_grow=True, placeholder="Click Send to count lines." -) -file_input = pn.widgets.FileInput() -chat_interface = pn.chat.ChatInterface( - callback=callback, widgets=[text_input, text_area_input, file_input] -) -chat_interface.send( - "Enter a message in the TextAreaInput below to count how many lines there is, " - "or upload a file to count the number of lines in the file.", - user="System", - respond=False, -) -chat_interface.servable() diff --git a/docs/examples/chat_features/chained_response.py b/docs/examples/chat_features/chained_response.py new file mode 100644 index 0000000..eac00dd --- /dev/null +++ b/docs/examples/chat_features/chained_response.py @@ -0,0 +1,51 @@ +""" +Demonstrates how to chain responses from a single message in the callback. 
+ +Highlight: + +- The `respond` parameter in the `send` method is used to chain responses. +- It's also possible to use `respond` as a method to chain responses. +""" + +from asyncio import sleep + +import panel as pn + +pn.extension() + +PERSON_1 = "Happy User" +PERSON_2 = "Excited User" +PERSON_3 = "Passionate User" + + +async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): + await sleep(2) + if user == "User": + instance.send( + f"Hey, {PERSON_2}! Did you hear the user?", + user=PERSON_1, + avatar="😊", + respond=True, # This is the default, but it's here for clarity + ) + elif user == PERSON_1: + user_message = instance.objects[-2] + user_contents = user_message.object + yield pn.chat.ChatMessage( + f'Yeah, they said "{user_contents}"! Did you also hear {PERSON_3}?', + user=PERSON_2, + avatar="😄", + ) + instance.respond() + elif user == PERSON_2: + instance.send( + f"Yup, I heard!", + user=PERSON_3, + avatar="😆", + respond=False, + ) + + +chat_interface = pn.chat.ChatInterface( + help_text="Send a message to start the conversation!", callback=callback +) +chat_interface.servable() diff --git a/docs/examples/chat_features/control_callback_response.py b/docs/examples/chat_features/control_callback_response.py new file mode 100644 index 0000000..a2ef65b --- /dev/null +++ b/docs/examples/chat_features/control_callback_response.py @@ -0,0 +1,58 @@ +""" +Demonstrates how to precisely control the callback response. + +Highlights: + +- Use a placeholder text to display a message while waiting for the response. +- Use a placeholder threshold to control when the placeholder text is displayed. +- Use send instead of stream/yield/return to keep the placeholder text while still sending a message, ensuring respond=False to avoid a recursive loop. +- Use yield to continuously update the response message. +- Use pn.chat.ChatMessage or dict to send a message with a custom user and avatar. 
+""" + +from asyncio import sleep +from random import choice + +import panel as pn + +pn.extension() + + +async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): + await sleep(0.5) + # use send instead of stream/yield/return to keep the placeholder text + # while still sending a message; ensure respond=False to avoid a recursive loop + instance.send( + "Let me flip the coin for you...", user="Game Master", avatar="🎲", respond=False + ) + await sleep(1) + + characters = "/|\\_" + index = 0 + for _ in range(0, 28): + index = (index + 1) % len(characters) + # use yield to continuously update the response message + # use pn.chat.ChatMessage to send a message with a custom user and avatar + yield pn.chat.ChatMessage("\r" + characters[index], user="Coin", avatar="🪙") + await sleep(0.005) + + result = choice(["heads", "tails"]) + if result in contents.lower(): + # equivalently, use a dict instead of a pn.chat.ChatMessage + yield {"object": f"Woohoo, {result}! You win!", "user": "Coin", "avatar": "🎲"} + else: + yield {"object": f"Aw, got {result}. Try again!", "user": "Coin", "avatar": "🎲"} + + +chat_interface = pn.chat.ChatInterface( + widgets=[ + pn.widgets.RadioButtonGroup( + options=["Heads!", "Tails!"], button_type="primary", button_style="outline" + ) + ], + callback=callback, + help_text="Select heads or tails, then click send!", + placeholder_text="Waiting for the result...", + placeholder_threshold=0.1, +) +chat_interface.servable() diff --git a/docs/examples/chat_features/custom_input_widgets.py b/docs/examples/chat_features/custom_input_widgets.py new file mode 100644 index 0000000..a0c415f --- /dev/null +++ b/docs/examples/chat_features/custom_input_widgets.py @@ -0,0 +1,36 @@ +""" +Demonstrates how to use the `ChatInterface` and custom widgets, +like `ChatAreaInput` and `FileInput`, to create a chatbot that counts +the number of lines in a message or file. 
+ +Highlights: + +- The `ChatAreaInput` and `FileInput` widgets are used to create a custom + chatbot that counts the number of lines in a message or file. +- The `callback` function is used to count the number of lines in the message + or file and return the result to the User. +""" + +import panel as pn + +pn.extension() + + +def callback(contents: str, user: str, instance: pn.chat.ChatInterface): + lines = contents.strip().count("\n") + message = f"This snippet has {lines + 1} lines." + return message + + +chat_input = pn.chat.ChatAreaInput(placeholder="Send a message") +file_input = pn.widgets.FileInput(accept=".py") +chat_interface = pn.chat.ChatInterface( + callback=callback, widgets=[chat_input, file_input] +) +chat_interface.send( + "Enter a message in the ChatAreaInput below to count how many lines there is, " + "or upload a Python file to count the number of lines in the file.", + user="System", + respond=False, +) +chat_interface.servable() diff --git a/docs/examples/features/feature_delayed_placeholder.py b/docs/examples/chat_features/delayed_placeholder.py similarity index 74% rename from docs/examples/features/feature_delayed_placeholder.py rename to docs/examples/chat_features/delayed_placeholder.py index eaac1ec..fe05f1a 100644 --- a/docs/examples/features/feature_delayed_placeholder.py +++ b/docs/examples/chat_features/delayed_placeholder.py @@ -1,5 +1,11 @@ """ Demonstrates how to delay the display of the placeholder. + +Highlights: + +- The `placeholder_threshold` parameter is used to delay the display of the placeholder. + If the response time is less than the threshold, the placeholder will not be displayed. +- The `placeholder_text` parameter is used to customize the placeholder text. 
""" from asyncio import sleep diff --git a/docs/examples/chat_features/echo_chat.py b/docs/examples/chat_features/echo_chat.py new file mode 100644 index 0000000..596b2a2 --- /dev/null +++ b/docs/examples/chat_features/echo_chat.py @@ -0,0 +1,25 @@ +""" +Demonstrates how to use the `ChatInterface` and a `callback` function to respond. + +Highlights: + +- The `ChatInterface` and a `callback` function are used to create a + chatbot that echoes back the message entered by the User. +- The `help_text` parameter is used to provide instructions to the User. +""" + +import panel as pn + +pn.extension() + + +def callback(contents: str, user: str, instance: pn.chat.ChatInterface): + message = f"Echoing {user}: {contents}" + return message + + +chat_interface = pn.chat.ChatInterface( + callback=callback, + help_text="Enter a message in the TextInput below and receive an echo!", +) +chat_interface.servable() diff --git a/docs/examples/basics/basic_streaming_chat_async.py b/docs/examples/chat_features/stream_echo_chat.py similarity index 69% rename from docs/examples/basics/basic_streaming_chat_async.py rename to docs/examples/chat_features/stream_echo_chat.py index 14ae119..f8166b5 100644 --- a/docs/examples/basics/basic_streaming_chat_async.py +++ b/docs/examples/chat_features/stream_echo_chat.py @@ -4,6 +4,12 @@ The chatbot Assistant echoes back the message entered by the User in an *async streaming* fashion. + +Highlights: + +- The function is defined as `async` and uses `yield` to stream back responses. +- Initialize `message` first to gather the characters and then `yield` it; + without it, only one letter would be displayed at a time. 
""" @@ -25,7 +31,7 @@ async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): chat_interface = pn.chat.ChatInterface(callback=callback) chat_interface.send( - "Enter a message in the TextInput below and receive an echo!", + "Enter a message below and receive an echo!", user="System", respond=False, ) diff --git a/docs/examples/chat_features/styled_slim_interface.py b/docs/examples/chat_features/styled_slim_interface.py new file mode 100644 index 0000000..6824b66 --- /dev/null +++ b/docs/examples/chat_features/styled_slim_interface.py @@ -0,0 +1,58 @@ +""" +Demonstrates how to create a slim `ChatInterface` that fits in the sidebar. + +Highlights: + +- The `ChatInterface` is placed in the sidebar. +- Set `show_*` parameters to `False` to hide the respective buttons. +- Use `message_params` to customize the appearance of each chat messages. +""" +import panel as pn + +pn.extension() + + +async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): + message = f"Echoing {user}: {contents}" + return message + + +chat_interface = pn.chat.ChatInterface( + callback=callback, + show_send=False, + show_rerun=False, + show_undo=False, + show_clear=False, + show_avatar=False, + show_timestamp=False, + show_button_name=False, + show_reaction_icons=False, + sizing_mode="stretch_width", + height=700, + message_params={ + "stylesheets": [ + """ + .message { + font-size: 1em; + } + .name { + font-size: 0.9em; + } + .timestamp { + font-size: 0.9em; + } + """ + ] + }, +) + +main = """ +We've put a *slim* `ChatInterface` in the sidebar. 
In the main area you +could add the object you are chatting about +""" + +pn.template.FastListTemplate( + main=[main], + sidebar=[chat_interface], + sidebar_width=500, +).servable() diff --git a/docs/examples/components/component_chat_input.py b/docs/examples/components/component_chat_input.py deleted file mode 100644 index 11f7cc4..0000000 --- a/docs/examples/components/component_chat_input.py +++ /dev/null @@ -1,24 +0,0 @@ -""" -The `ChatInput` widget is a combination of a `TextInput` widget and a `Button`. -When the input is submitted the `TextInput` widget is cleared and ready to accept -a new input. - -If you need a `ChatInput` widget you can copy the code from -[here](https://github.com/holoviz-topics/panel-chat-examples/blob/main/panel_chat_examples/components/chat_input.py). -""" -import panel as pn - -from panel_chat_examples.components import ChatInput - -pn.extension() - -chat_input = ChatInput(placeholder="Say something") - - -def message(prompt): - if not prompt: - return "" - return f"User has sent the following prompt: **{prompt}**" - - -pn.Column(pn.bind(message, chat_input.param.value), chat_input, margin=25).servable() diff --git a/docs/examples/components/component_environment_widget.py b/docs/examples/components/component_environment_widget.py deleted file mode 100644 index e1a0473..0000000 --- a/docs/examples/components/component_environment_widget.py +++ /dev/null @@ -1,38 +0,0 @@ -""" -The [`EnvironmentWidgetBase`](https://github.com/holoviz-topics/panel-chat-examples/blob/main/panel_chat_examples/_environment_widget.py) -class enables you to manage variable values from a combination of custom values, -environment variables and user input. - -Its very useful when you don't have the resources to provide API keys for services -like OpenAI. It will determine which variables have not been set as environment -variables and ask the user for them. 
-""" -# Longer term we should try to get this widget included in Panel -import panel as pn -import param - -from panel_chat_examples import EnvironmentWidgetBase - -pn.extension() - - -class EnvironmentWidget(EnvironmentWidgetBase): - """An example Environment Widget for managing environment variables""" - - OPENAI_API_KEY = param.String(doc="A key for the OpenAI api") - WEAVIATE_API_KEY = param.String(doc="A key for the Weaviate api") - LANGCHAIN_API_KEY = param.String(doc="A key for the LangChain api") - - -environment = EnvironmentWidget(max_width=1000) -pn.template.FastListTemplate( - title="Environment Widget", - sidebar=[environment], - main=[ - __doc__, - pn.Column( - environment.param.variables_set, - environment.param.variables_not_set, - ), - ], -).servable() diff --git a/docs/examples/components/component_status.py b/docs/examples/components/component_status.py deleted file mode 100644 index 9b6255e..0000000 --- a/docs/examples/components/component_status.py +++ /dev/null @@ -1,36 +0,0 @@ -""" -The `Status` *indicator* can report progress in steps and with -detailed context. - -If you need a `Status` widget you can copy the code from -[here](https://github.com/holoviz-topics/panel-chat-examples/blob/main/panel_chat_examples/components/chat_input/components/status.py). 
-""" -import time - -import panel as pn - -from panel_chat_examples.components import Status - -status = Status("Downloading data...", sizing_mode="stretch_width") - - -def run(_): - with status.report() as progress: - status.collapsed = False - progress("Searching for data...") - time.sleep(1.5) - progress("Downloading data...") - time.sleep(1.5) - progress("Validating data...") - time.sleep(1.5) - status.collapsed = True - - -run_button = pn.widgets.Button( - name="Run", on_click=run, button_type="primary", button_style="outline" -) - -pn.Column( - status, - run_button, -).servable() diff --git a/docs/examples/features/feature_chained_response.py b/docs/examples/features/feature_chained_response.py deleted file mode 100644 index 739b054..0000000 --- a/docs/examples/features/feature_chained_response.py +++ /dev/null @@ -1,36 +0,0 @@ -""" -Demonstrates how to chain responses in a `ChatInterface`. -""" - -from time import sleep - -import panel as pn - -pn.extension() - -ARM_BOT = "Arm Bot" -LEG_BOT = "Leg Bot" - - -async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): - sleep(1) - if user == "User": - yield { - "user": ARM_BOT, - "avatar": "🦾", - "object": f"Hey, {LEG_BOT}! Did you hear the user?", - } - instance.respond() - elif user == ARM_BOT: - user_message = instance.objects[-2] - user_contents = user_message.object - yield { - "user": LEG_BOT, - "avatar": "🦿", - "object": f'Yeah! They said "{user_contents}".', - } - - -chat_interface = pn.chat.ChatInterface(callback=callback) -chat_interface.send("Send a message!", user="System", respond=False) -chat_interface.servable() diff --git a/docs/examples/features/feature_replace_response.py b/docs/examples/features/feature_replace_response.py deleted file mode 100644 index eba9569..0000000 --- a/docs/examples/features/feature_replace_response.py +++ /dev/null @@ -1,43 +0,0 @@ -""" -Demonstrates how to update the response of the `ChatInterface`. 
-""" - -from asyncio import sleep -from random import choice - -import panel as pn - -pn.extension() - - -async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): - yield "Let me flip the coin for you..." - await sleep(1) - - characters = "/|\\_" - index = 0 - for _ in range(0, 28): - index = (index + 1) % len(characters) - yield "\r" + characters[index] - await sleep(0.005) - - result = choice(["heads", "tails"]) - if result in contents.lower(): - yield f"Woohoo, {result}! You win!" - else: - yield f"Aw, got {result}. Try again!" - - -chat_interface = pn.chat.ChatInterface( - widgets=[ - pn.widgets.RadioButtonGroup( - options=["Heads!", "Tails!"], button_type="primary", button_style="outline" - ) - ], - callback=callback, - callback_user="Game Master", -) -chat_interface.send( - "Select heads or tails, then click send!", user="System", respond=False -) -chat_interface.servable() diff --git a/docs/examples/features/feature_slim_interface.py b/docs/examples/features/feature_slim_interface.py deleted file mode 100644 index 58de363..0000000 --- a/docs/examples/features/feature_slim_interface.py +++ /dev/null @@ -1,34 +0,0 @@ -""" -Demonstrates how to create a slim `ChatInterface` that fits in the sidebar. -""" -import panel as pn - -pn.extension() - - -async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): - message = f"Echoing {user}: {contents}" - return message - - -chat_interface = pn.chat.ChatInterface( - callback=callback, - show_send=False, - show_rerun=False, - show_undo=False, - show_clear=False, - show_button_name=False, - sizing_mode="stretch_both", - min_height=200, - width=475, -) -chat_interface.send("Send a message and hear an echo!", user="System", respond=False) - -pn.template.FastListTemplate( - main=[ - """We've put a *slim* `ChatInterface` in the sidebar. 
In the main area you \ -could add the object you are chatting about""" - ], - sidebar=[chat_interface], - sidebar_width=500, -).servable() diff --git a/docs/examples/kickstart_snippets/langchain_.py b/docs/examples/kickstart_snippets/langchain_.py new file mode 100644 index 0000000..eb8aabc --- /dev/null +++ b/docs/examples/kickstart_snippets/langchain_.py @@ -0,0 +1,85 @@ +""" +Demonstrates how to use LangChain to wrap OpenAI's GPT-3.5 API with Panel's ChatInterface. + +Highlights: + +- Uses `PasswordInput` to set the API key, or uses the `OPENAI_API_KEY` environment variable. +- Uses `serialize` to get chat history from the `ChatInterface`. +- Uses `yield` to continuously concatenate the parts of the response +""" + +from operator import itemgetter + +import panel as pn +from langchain.memory import ConversationTokenBufferMemory +from langchain_core.messages import AIMessage, HumanMessage +from langchain_core.output_parsers import StrOutputParser +from langchain_core.prompts import ChatPromptTemplate +from langchain_core.runnables import RunnableLambda, RunnablePassthrough +from langchain_openai import ChatOpenAI + +pn.extension() + + +async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): + if api_key_input.value: + # use api_key_input.value if set, otherwise use OPENAI_API_KEY + llm.api_key = api_key_input.value + + memory.clear() + for message in instance.serialize(): + if message["role"] == "user": + memory.chat_memory.add_user_message(HumanMessage(**message)) + else: + memory.chat_memory.add_ai_message(AIMessage(**message)) + + response = chain.astream({"user_input": contents}) + + message = "" + async for chunk in response: + message += chunk + yield message + + +llm = ChatOpenAI(model="gpt-3.5-turbo") +memory = ConversationTokenBufferMemory( + return_messages=True, + llm=llm, + memory_key="chat_history", + max_token_limit=8192 - 1024, +) +memory_link = RunnablePassthrough.assign( + 
chat_history=RunnableLambda(memory.load_memory_variables) + | itemgetter("chat_history") +) +prompt_link = ChatPromptTemplate.from_template( + "{chat_history}\n\nBe a helpful chat bot and answer: {user_input}", +) +output_parser = StrOutputParser() + +chain = ( + {"user_input": RunnablePassthrough()} + | memory_link + | prompt_link + | llm + | output_parser +) + +api_key_input = pn.widgets.PasswordInput( + placeholder="sk-... uses $OPENAI_API_KEY if not set", + sizing_mode="stretch_width", + styles={"color": "black"}, +) +chat_interface = pn.chat.ChatInterface( + callback=callback, + callback_user="GPT-3.5", + help_text="Send a message to get a reply from GPT 3.5 Turbo!", + callback_exception="verbose", +) +template = pn.template.FastListTemplate( + title="LangChain OpenAI GPT-3.5", + header_background="#E8B0E6", + main=[chat_interface], + header=[api_key_input], +) +template.servable() diff --git a/docs/examples/kickstart_snippets/llama_cpp_python_.py b/docs/examples/kickstart_snippets/llama_cpp_python_.py new file mode 100644 index 0000000..332cde1 --- /dev/null +++ b/docs/examples/kickstart_snippets/llama_cpp_python_.py @@ -0,0 +1,61 @@ +""" +Demonstrates how to use LlamaCpp with a local, quantized model, like TheBloke's Mistral Instruct v0.2, +with Panel's ChatInterface. + +Highlights: + +- Uses `pn.state.onload` to load the model from Hugging Face Hub when the app is loaded and prevent blocking the app. +- Uses `pn.state.cache` to store the `Llama` instance. +- Uses `serialize` to get chat history from the `ChatInterface`. +- Uses `yield` to continuously concatenate the parts of the response. 
+""" + +import panel as pn +from huggingface_hub import hf_hub_download +from llama_cpp import Llama + +REPO_ID = "TheBloke/Mistral-7B-Instruct-v0.2-code-ft-GGUF" +FILENAME = "mistral-7b-instruct-v0.2-code-ft.Q5_K_S.gguf" + +pn.extension() + + +def load_model(): + model_path = hf_hub_download(repo_id=REPO_ID, filename=FILENAME) + pn.state.cache["llama"] = Llama( + model_path=model_path, + chat_format="mistral-instruct", + verbose=False, + n_gpu_layers=-1, + ) + chat_interface.disabled = False + + +def callback(contents: str, user: str, instance: pn.chat.ChatInterface): + # memory is a list of messages + messages = instance.serialize() + + llama = pn.state.cache["llama"] + response = llama.create_chat_completion_openai_v1(messages=messages, stream=True) + + message = "" + for chunk in response: + part = chunk.choices[0].delta.content + if part is not None: + message += part + yield message + + +chat_interface = pn.chat.ChatInterface( + callback=callback, + callback_user="LlamaCpp", + help_text="Send a message to get a reply from LlamaCpp!", + disabled=True, +) +template = pn.template.FastListTemplate( + title="LlamaCpp Mistral", + header_background="#A0A0A0", + main=[chat_interface], +) +pn.state.onload(load_model) +template.servable() diff --git a/docs/examples/kickstart_snippets/llama_index_.py b/docs/examples/kickstart_snippets/llama_index_.py new file mode 100644 index 0000000..b1e1fa0 --- /dev/null +++ b/docs/examples/kickstart_snippets/llama_index_.py @@ -0,0 +1,64 @@ +""" +Demonstrates how to use LlamaIndex to wrap OpenAI's GPT-3.5 API with Panel's ChatInterface. + +Highlights: + +- Uses `PasswordInput` to set the API key, or uses the `OPENAI_API_KEY` environment variable. +- Uses `serialize` to get chat history from the `ChatInterface`. 
+- Uses `yield` to continuously concatenate the parts of the response +""" + +import panel as pn +from llama_index.core.agent import ReActAgent +from llama_index.core.llms import ChatMessage +from llama_index.core.tools import FunctionTool +from llama_index.llms.openai import OpenAI + +pn.extension() + + +def multiply(a: int, b: int) -> int: + """Multiple two integers and returns the result integer""" + return a * b + + +async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): + if api_key_input.value: + # use api_key_input.value if set, otherwise use OPENAI_API_KEY + llm.api_key = api_key_input.value + + # memory is a list of messages + messages = [ChatMessage(**message) for message in instance.serialize()] + + response = await llm.astream_chat( + model="gpt-3.5-turbo", + messages=messages, + ) + + async for chunk in response: + message = chunk.message.content + yield str(message) + + +llm = OpenAI(model="gpt-3.5-turbo-0613") + +multiply_tool = FunctionTool.from_defaults(fn=multiply) +agent = ReActAgent.from_tools([multiply_tool], llm=llm, verbose=True) + +api_key_input = pn.widgets.PasswordInput( + placeholder="sk-... uses $OPENAI_API_KEY if not set", + sizing_mode="stretch_width", + styles={"color": "black"}, +) +chat_interface = pn.chat.ChatInterface( + callback=callback, + callback_user="GPT-3.5", + help_text="Send a message to get a reply from GPT 3.5 Turbo!", +) +template = pn.template.FastListTemplate( + title="LlamaIndex OpenAI GPT-3.5", + header_background="#83CBF2", + main=[chat_interface], + header=[api_key_input], +) +template.servable() diff --git a/docs/examples/kickstart_snippets/mistralai_.py b/docs/examples/kickstart_snippets/mistralai_.py new file mode 100644 index 0000000..d4883ae --- /dev/null +++ b/docs/examples/kickstart_snippets/mistralai_.py @@ -0,0 +1,62 @@ +""" +Demonstrates how to use MistralAI's Small API with Panel's ChatInterface. 
+ +Highlights: + +- Uses `PasswordInput` to set the API key, or uses the `MISTRAL_API_KEY` environment variable. +- Runs `pn.bind` to update the `MistralAsyncClient` when the `api_key` changes and pn.state.cache to store the client. +- Uses `serialize` to get chat history from the `ChatInterface`. +- Uses `yield` to continuously concatenate the parts of the response. +""" + +import panel as pn +from mistralai.async_client import MistralAsyncClient + +pn.extension() + + +def update_api_key(api_key): + # use api_key_input.value if set, otherwise use MISTRAL_API_KEY + pn.state.cache["aclient"] = ( + MistralAsyncClient(api_key=api_key) if api_key else MistralAsyncClient() + ) + + +async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): + # memory is a list of messages + messages = instance.serialize() + + response = pn.state.cache["aclient"].chat_stream( + model="mistral-small", + messages=messages, + ) + + message = "" + async for chunk in response: + part = chunk.choices[0].delta.content + if part is not None: + message += part + yield message + + +api_key_input = pn.widgets.PasswordInput( + placeholder="Uses $MISTRAL_API_KEY if not set", + sizing_mode="stretch_width", + styles={"color": "black"}, +) +pn.bind(update_api_key, api_key_input, watch=True) +api_key_input.param.trigger("value") + +chat_interface = pn.chat.ChatInterface( + callback=callback, + callback_user="MistralAI", + help_text="Send a message to get a reply from MistralAI!", + callback_exception="verbose", +) +template = pn.template.FastListTemplate( + title="MistralAI Small", + header_background="#FF7000", + main=[chat_interface], + header=[api_key_input], +) +template.servable() diff --git a/docs/examples/kickstart_snippets/openai_.py b/docs/examples/kickstart_snippets/openai_.py new file mode 100644 index 0000000..e7cc7ea --- /dev/null +++ b/docs/examples/kickstart_snippets/openai_.py @@ -0,0 +1,56 @@ +""" +Demonstrates how to use OpenAI's GPT-3.5 API with Panel's 
ChatInterface. + +Highlights: + +- Uses `PasswordInput` to set the API key, or uses the `OPENAI_API_KEY` environment variable. +- Uses `serialize` to get chat history from the `ChatInterface`. +- Uses `yield` to continuously concatenate the parts of the response +""" + +import panel as pn +from openai import AsyncOpenAI + +pn.extension() + + +async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): + if api_key_input.value: + # use api_key_input.value if set, otherwise use OPENAI_API_KEY + aclient.api_key = api_key_input.value + + # memory is a list of messages + messages = instance.serialize() + + response = await aclient.chat.completions.create( + model="gpt-3.5-turbo", + messages=messages, + stream=True, + ) + + message = "" + async for chunk in response: + part = chunk.choices[0].delta.content + if part is not None: + message += part + yield message + + +aclient = AsyncOpenAI() +api_key_input = pn.widgets.PasswordInput( + placeholder="sk-... uses $OPENAI_API_KEY if not set", + sizing_mode="stretch_width", + styles={"color": "black"}, +) +chat_interface = pn.chat.ChatInterface( + callback=callback, + callback_user="GPT-3.5", + help_text="Send a message to get a reply from GPT-3.5 Turbo!", +) +template = pn.template.FastListTemplate( + title="OpenAI GPT-3.5", + header_background="#212121", + main=[chat_interface], + header=[api_key_input], +) +template.servable() diff --git a/docs/examples/langchain/langchain_lcel.py b/docs/examples/langchain/langchain_lcel.py deleted file mode 100644 index 7b5e62b..0000000 --- a/docs/examples/langchain/langchain_lcel.py +++ /dev/null @@ -1,34 +0,0 @@ -""" -Demonstrates how to use the `ChatInterface` to create a chatbot using -[LangChain Expression Language](https://python.langchain.com/docs/expression_language/) (LCEL). 
-""" - -import panel as pn -from langchain_core.output_parsers import StrOutputParser -from langchain_core.prompts import ChatPromptTemplate -from langchain_core.runnables import RunnablePassthrough -from langchain_openai import ChatOpenAI - -pn.extension() - - -async def callback(contents, user, instance): - message = "" - async for token in chain.astream(contents): - message += token - yield message - - -prompt = ChatPromptTemplate.from_template("Tell me a top-notch joke about {topic}") -model = ChatOpenAI(model="gpt-3.5-turbo") -output_parser = StrOutputParser() -chain = {"topic": RunnablePassthrough()} | prompt | model | output_parser - -chat_interface = pn.chat.ChatInterface( - pn.chat.ChatMessage( - "Offer a topic and ChatGPT will respond with a joke!", user="System" - ), - callback=callback, - callback_user="ChatGPT", -) -chat_interface.servable() diff --git a/docs/examples/langchain/langchain_llama_and_mistral.py b/docs/examples/langchain/langchain_llama_and_mistral.py deleted file mode 100644 index d1dbdb6..0000000 --- a/docs/examples/langchain/langchain_llama_and_mistral.py +++ /dev/null @@ -1,75 +0,0 @@ -""" -Demonstrates how to use the `ChatInterface` to create a chatbot using -[Llama2](https://ai.meta.com/llama/) and [Mistral](https://docs.mistral.ai). 
-""" - -import panel as pn -from langchain.chains import LLMChain -from langchain.llms import CTransformers -from langchain.prompts import PromptTemplate - -pn.extension() - -MODEL_KWARGS = { - "llama": { - "model": "TheBloke/Llama-2-7b-Chat-GGUF", - "model_file": "llama-2-7b-chat.Q5_K_M.gguf", - }, - "mistral": { - "model": "TheBloke/Mistral-7B-Instruct-v0.1-GGUF", - "model_file": "mistral-7b-instruct-v0.1.Q4_K_M.gguf", - }, -} - -# We cache the chains and responses to speed up things -llm_chains = pn.state.cache["llm_chains"] = pn.state.cache.get("llm_chains", {}) -responses = pn.state.cache["responses"] = pn.state.cache.get("responses", {}) - -TEMPLATE = """[INST] You are a friendly chat bot who's willing to help answer the -user: -{user_input} [/INST] -""" - -CONFIG = {"max_new_tokens": 256, "temperature": 0.5} - - -def _get_llm_chain(model, template=TEMPLATE, config=CONFIG): - llm = CTransformers(**MODEL_KWARGS[model], config=config, streaming=True) - prompt = PromptTemplate(template=template, input_variables=["user_input"]) - llm_chain = LLMChain(prompt=prompt, llm=llm) - return llm_chain - - -# Cannot use pn.cache due to https://github.com/holoviz/panel/issues/4236 -async def _get_response(contents: str, model: str) -> str: - key = (contents, model) - if key in responses: - return responses[key] - - llm_chain = llm_chains[model] - response = responses[key] = await llm_chain.apredict(user_input=contents) - return response - - -async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): - for model in MODEL_KWARGS: - if model not in llm_chains: - instance.placeholder_text = ( - f"Downloading {model}, this may take a few minutes, " - f"or longer, depending on your internet connection." 
- ) - llm_chains[model] = _get_llm_chain(model) - - message = None - response = await _get_response(contents, model) - for chunk in response: - message = instance.stream(chunk, user=model.title(), message=message) - - -chat_interface = pn.chat.ChatInterface(callback=callback, placeholder_threshold=0.1) -chat_interface.send( - "Send a message to get a reply from both Llama 2 and Mistral (7B)!", - user="System", - respond=False, -) -chat_interface.servable() diff --git a/docs/examples/langchain/langchain_math_assistant.py b/docs/examples/langchain/langchain_math_assistant.py deleted file mode 100644 index 90efacd..0000000 --- a/docs/examples/langchain/langchain_math_assistant.py +++ /dev/null @@ -1,28 +0,0 @@ -""" -Demonstrates how to use the `ChatInterface` to create -a math chatbot using OpenAI and the `PanelCallbackHandler` for -[LangChain](https://python.langchain.com/docs/get_started/introduction). See -[LangChain Callbacks](https://python.langchain.com/docs/modules/callbacks/). -""" - -import panel as pn -from langchain.chains import LLMMathChain -from langchain.llms import OpenAI - -pn.extension() - - -async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): - final_answer = await llm_math.arun(question=contents) - instance.stream(final_answer, message=instance.objects[-1]) - - -chat_interface = pn.chat.ChatInterface(callback=callback, callback_user="Langchain") -chat_interface.send( - "Send a math question to get an answer from MathGPT!", user="System", respond=False -) - -callback_handler = pn.chat.langchain.PanelCallbackHandler(chat_interface) -llm = OpenAI(streaming=True, callbacks=[callback_handler]) -llm_math = LLMMathChain.from_llm(llm, verbose=True) -chat_interface.servable() diff --git a/docs/examples/langchain/langchain_pdf_assistant.py b/docs/examples/langchain/langchain_pdf_assistant.py deleted file mode 100644 index c34043f..0000000 --- a/docs/examples/langchain/langchain_pdf_assistant.py +++ /dev/null @@ -1,184 +0,0 @@ -""" 
-Demonstrates how to use the `ChatInterface` to chat about a PDF using -OpenAI, [LangChain](https://python.langchain.com/docs/get_started/introduction) and -[Chroma](https://docs.trychroma.com/). -""" - -import tempfile -from pathlib import Path - -import panel as pn -import param -from langchain.chains import RetrievalQA -from langchain.document_loaders import PyPDFLoader -from langchain.embeddings import OpenAIEmbeddings -from langchain.llms import OpenAI -from langchain.text_splitter import CharacterTextSplitter -from langchain.vectorstores import Chroma - -from panel_chat_examples import EnvironmentWidgetBase - -EXAMPLE_PDF = Path(__file__).parent / "example.pdf" -TTL = 1800 # 30 minutes - -pn.extension() - -# Define the Retrieval Question/ Answer Chain -# We use caching to speed things up - - -@pn.cache(ttl=TTL) -def _get_texts(pdf): - # load documents - with tempfile.NamedTemporaryFile("wb", delete=False) as f: - f.write(pdf) - file_name = f.name - loader = PyPDFLoader(file_name) - documents = loader.load() - - # split the documents into chunks - text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0) - return text_splitter.split_documents(documents) - - -@pn.cache(ttl=TTL) -def _get_vector_db(pdf, openai_api_key): - texts = _get_texts(pdf) - # select which embeddings we want to use - embeddings = OpenAIEmbeddings(openai_api_key=openai_api_key) - # create the vectorestore to use as the index - return Chroma.from_documents(texts, embeddings) - - -@pn.cache(ttl=TTL) -def _get_retriever(pdf, openai_api_key: str, number_of_chunks: int): - db = _get_vector_db(pdf, openai_api_key) - return db.as_retriever( - search_type="similarity", search_kwargs={"k": number_of_chunks} - ) - - -@pn.cache(ttl=TTL) -def _get_retrieval_qa( - pdf: bytes, number_of_chunks: int, chain_type: str, openai_api_key: str -): - retriever = _get_retriever(pdf, openai_api_key, number_of_chunks) - return RetrievalQA.from_chain_type( - llm=OpenAI(openai_api_key=openai_api_key), - 
chain_type=chain_type, - retriever=retriever, - return_source_documents=True, - verbose=True, - ) - - -def _get_response(contents): - qa = _get_retrieval_qa( - state.pdf, state.number_of_chunks, state.chain_type, environ.OPENAI_API_KEY - ) - response = qa({"query": contents}) - chunks = [] - - for chunk in response["source_documents"][::-1]: - name = f"Chunk {chunk.metadata['page']}" - content = chunk.page_content - chunks.insert(0, (name, content)) - return response, chunks - - -# Define the Application State -class EnvironmentWidget(EnvironmentWidgetBase): - OPENAI_API_KEY: str = param.String() - - -class State(param.Parameterized): - pdf: bytes = param.Bytes() - number_of_chunks: int = param.Integer(default=2, bounds=(1, 5), step=1) - chain_type: str = param.Selector( - objects=["stuff", "map_reduce", "refine", "map_rerank"] - ) - - -environ = EnvironmentWidget() -state = State() - -# Define the widgets -pdf_input = pn.widgets.FileInput.from_param(state.param.pdf, accept=".pdf", height=50) -text_input = pn.widgets.TextInput(placeholder="First, upload a PDF!") -chain_type_input = pn.widgets.RadioButtonGroup.from_param( - state.param.chain_type, - orientation="vertical", - sizing_mode="stretch_width", - button_type="primary", - button_style="outline", -) - -# Define and configure the ChatInterface - - -def _get_validation_message(): - pdf = state.pdf - openai_api_key = environ.OPENAI_API_KEY - if not pdf and not openai_api_key: - return "Please first enter an OpenAI Api key and upload a PDF!" - if not pdf: - return "Please first upload a PDF!" - if not openai_api_key: - return "Please first enter an OpenAI Api key!" 
- return "" - - -def _send_not_ready_message(chat_interface) -> bool: - message = _get_validation_message() - - if message: - chat_interface.send({"user": "System", "object": message}, respond=False) - return bool(message) - - -async def respond(contents, user, chat_interface): - if _send_not_ready_message(chat_interface): - return - if chat_interface.active == 0: - chat_interface.active = 1 - chat_interface.active_widget.placeholder = "Ask questions here!" - yield {"user": "OpenAI", "object": "Let's chat about the PDF!"} - return - - response, documents = _get_response(contents) - pages_layout = pn.Accordion(*documents, sizing_mode="stretch_width", max_width=800) - answers = pn.Column(response["result"], pages_layout) - - yield {"user": "OpenAI", "object": answers} - - -chat_interface = pn.chat.ChatInterface( - callback=respond, - sizing_mode="stretch_width", - widgets=[pdf_input, text_input], - disabled=True, -) - - -@pn.depends(state.param.pdf, environ.param.OPENAI_API_KEY, watch=True) -def _enable_chat_interface(pdf, openai_api_key): - if pdf and openai_api_key: - chat_interface.disabled = False - else: - chat_interface.disabled = True - - -_send_not_ready_message(chat_interface) - -## Wrap the app in a nice template - -template = pn.template.BootstrapTemplate( - sidebar=[ - environ, - state.param.number_of_chunks, - "Chain Type:", - chain_type_input, - ], - main=[chat_interface], -) -template.servable() diff --git a/docs/examples/langchain/langchain_streaming_lcel_with_memory.py b/docs/examples/langchain/langchain_streaming_lcel_with_memory.py deleted file mode 100644 index abafea5..0000000 --- a/docs/examples/langchain/langchain_streaming_lcel_with_memory.py +++ /dev/null @@ -1,56 +0,0 @@ -""" -Demonstrates how to use the `ChatInterface` to create a chatbot using -[LangChain Expression Language](https://python.langchain.com/docs/expression_language/) (LCEL) -with streaming and memory. 
-""" - -from operator import itemgetter - -import panel as pn -from langchain.memory import ConversationSummaryBufferMemory -from langchain_core.output_parsers import StrOutputParser -from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder -from langchain_core.runnables import RunnableLambda, RunnablePassthrough -from langchain_openai import ChatOpenAI - -pn.extension() - -SYSTEM_PROMPT = "Try to be a silly comedian." - - -async def callback(contents, user, instance): - message = "" - inputs = {"input": contents} - async for token in chain.astream(inputs): - message += token - yield message - memory.save_context(inputs, {"output": message}) - - -model = ChatOpenAI(model="gpt-3.5-turbo") -memory = ConversationSummaryBufferMemory(return_messages=True, llm=model) -prompt = ChatPromptTemplate.from_messages( - [ - ("system", SYSTEM_PROMPT), - MessagesPlaceholder(variable_name="history"), - ("human", "{input}"), - ] -) -output_parser = StrOutputParser() -chain = ( - RunnablePassthrough.assign( - history=RunnableLambda(memory.load_memory_variables) | itemgetter("history") - ) - | prompt - | model - | output_parser -) - -chat_interface = pn.chat.ChatInterface( - pn.chat.ChatMessage( - "Offer a topic and ChatGPT will try to be funny!", user="System" - ), - callback=callback, - callback_user="ChatGPT", -) -chat_interface.servable() diff --git a/docs/examples/langchain/langchain_with_memory.py b/docs/examples/langchain/langchain_with_memory.py deleted file mode 100644 index 86dd9ee..0000000 --- a/docs/examples/langchain/langchain_with_memory.py +++ /dev/null @@ -1,27 +0,0 @@ -""" -Demonstrates how to use the `ChatInterface` to create a chatbot with memory using -OpenAI and [LangChain](https://python.langchain.com/docs/get_started/introduction). 
-""" - -import panel as pn -from langchain.chains import ConversationChain -from langchain.chat_models import ChatOpenAI -from langchain.memory import ConversationBufferMemory - -pn.extension() - - -async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): - await chain.apredict(input=contents) - - -chat_interface = pn.chat.ChatInterface(callback=callback, callback_user="ChatGPT") -chat_interface.send( - "Send a message to get a reply from ChatGPT!", user="System", respond=False -) - -callback_handler = pn.chat.langchain.PanelCallbackHandler(chat_interface) -llm = ChatOpenAI(streaming=True, callbacks=[callback_handler]) -memory = ConversationBufferMemory() -chain = ConversationChain(llm=llm, memory=memory) -chat_interface.servable() diff --git a/docs/examples/mistral/mistral_and_llama.py b/docs/examples/mistral/mistral_and_llama.py deleted file mode 100644 index 5a459e3..0000000 --- a/docs/examples/mistral/mistral_and_llama.py +++ /dev/null @@ -1,46 +0,0 @@ -""" -Demonstrates how to use the ChatInterface widget to create a chatbot using -Llama2 and Mistral. 
-""" - -import panel as pn -from ctransformers import AutoModelForCausalLM - -pn.extension() - -MODEL_ARGUMENTS = { - "llama": { - "args": ["TheBloke/Llama-2-7b-Chat-GGUF"], - "kwargs": {"model_file": "llama-2-7b-chat.Q5_K_M.gguf"}, - }, - "mistral": { - "args": ["TheBloke/Mistral-7B-Instruct-v0.1-GGUF"], - "kwargs": {"model_file": "mistral-7b-instruct-v0.1.Q4_K_M.gguf"}, - }, -} - - -def callback(contents: str, user: str, instance: pn.chat.ChatInterface): - for model in MODEL_ARGUMENTS: - if model not in pn.state.cache: - pn.state.cache[model] = AutoModelForCausalLM.from_pretrained( - *MODEL_ARGUMENTS[model]["args"], - **MODEL_ARGUMENTS[model]["kwargs"], - gpu_layers=1, - ) - - llm = pn.state.cache[model] - response = llm(contents, max_new_tokens=512, stream=True) - - message = None - for chunk in response: - message = instance.stream(chunk, user=model.title(), message=message) - - -chat_interface = pn.chat.ChatInterface(callback=callback) -chat_interface.send( - "Send a message to get a reply from both Llama 2 and Mistral (7B)!", - user="System", - respond=False, -) -chat_interface.servable() diff --git a/docs/examples/mistral/mistral_api_chat.py b/docs/examples/mistral/mistral_api_chat.py deleted file mode 100644 index b06ef22..0000000 --- a/docs/examples/mistral/mistral_api_chat.py +++ /dev/null @@ -1,28 +0,0 @@ -import panel as pn -from mistralai.client import MistralClient -from mistralai.models.chat_completion import ChatMessage - -pn.extension() - - -async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): - messages.append(ChatMessage(role="user", content=contents)) - - mistral_response = "" - for chunk in client.chat_stream(model="mistral-tiny", messages=messages): - response = chunk.choices[0].delta.content - if response is not None: - mistral_response += response - yield mistral_response - - if mistral_response: - messages.append(ChatMessage(role="assistant", content=mistral_response)) - - -messages = [] -client = MistralClient() # 
api_key=os.environ.get("MISTRAL_API_KEY", None) -chat_interface = pn.chat.ChatInterface(callback=callback, callback_user="Mistral AI") -chat_interface.send( - "Send a message to get a reply from Mistral AI!", user="System", respond=False -) -chat_interface.servable() diff --git a/docs/examples/mistral/mistral_chat.py b/docs/examples/mistral/mistral_chat.py deleted file mode 100644 index 6557eb8..0000000 --- a/docs/examples/mistral/mistral_chat.py +++ /dev/null @@ -1,52 +0,0 @@ -""" -Demonstrates how to use the `ChatInterface` to create a chatbot using -[Mistral](https://docs.mistral.ai) through -[CTransformers](https://github.com/marella/ctransformers). -""" - -import panel as pn -from ctransformers import AutoConfig, AutoModelForCausalLM, Config - -pn.extension() - -llms = pn.state.cache["llms"] = pn.state.cache.get("llms", {}) - -INSTRUCTIONS = "You are a friendly chat bot willing to help out the user." - - -def apply_template(instructions, contents): - text_row = f"""[INST]{instructions} {contents}[/INST]""" - return text_row - - -async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): - if "mistral" not in llms: - instance.placeholder_text = "Downloading model; please wait..." 
- config = AutoConfig( - config=Config( - temperature=0.5, max_new_tokens=2048, context_length=2048, gpu_layers=1 - ), - ) - llms["mistral"] = AutoModelForCausalLM.from_pretrained( - "TheBloke/Mistral-7B-Instruct-v0.1-GGUF", - model_file="mistral-7b-instruct-v0.1.Q4_K_M.gguf", - config=config, - ) - - llm = llms["mistral"] - response = llm(apply_template(INSTRUCTIONS, contents), stream=True) - message = "" - for token in response: - message += token - yield message - - -chat_interface = pn.chat.ChatInterface( - callback=callback, - callback_user="Mistral", - reset_on_send=True, -) -chat_interface.send( - "Send a message to get a reply from Mistral!", user="System", respond=False -) -chat_interface.servable() diff --git a/docs/examples/mistral/mistral_with_memory.py b/docs/examples/mistral/mistral_with_memory.py deleted file mode 100644 index d4b1fa9..0000000 --- a/docs/examples/mistral/mistral_with_memory.py +++ /dev/null @@ -1,62 +0,0 @@ -""" -Demonstrates how to use the `ChatInterface` to create a chatbot using -[Mistral](https://docs.mistral.ai) through -[CTransformers](https://github.com/marella/ctransformers). The chatbot includes a -memory of the conversation history. -""" - -import panel as pn -from ctransformers import AutoConfig, AutoModelForCausalLM, Config - -pn.extension() - -SYSTEM_INSTRUCTIONS = "Do what the user requests." - - -def apply_template(history): - history = [message for message in history if message.user != "System"] - prompt = "" - for i, message in enumerate(history): - if i == 0: - prompt += f"[INST]{SYSTEM_INSTRUCTIONS} {message.object}[/INST]" - else: - if message.user == "Mistral": - prompt += f"{message.object}" - else: - prompt += f"""[INST]{message.object}[/INST]""" - return prompt - - -async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): - if "mistral" not in llms: - instance.placeholder_text = "Downloading model; please wait..." 
- config = AutoConfig( - config=Config( - temperature=0.5, max_new_tokens=2048, context_length=2048, gpu_layers=1 - ), - ) - llms["mistral"] = AutoModelForCausalLM.from_pretrained( - "TheBloke/Mistral-7B-Instruct-v0.1-GGUF", - model_file="mistral-7b-instruct-v0.1.Q4_K_M.gguf", - config=config, - ) - - llm = llms["mistral"] - history = [message for message in instance.objects] - prompt = apply_template(history) - response = llm(prompt, stream=True) - message = "" - for token in response: - message += token - yield message - - -llms = {} -chat_interface = pn.chat.ChatInterface( - callback=callback, - callback_user="Mistral", -) -chat_interface.send( - "Send a message to get a reply from Mistral!", user="System", respond=False -) -chat_interface.servable() diff --git a/docs/examples/openai/ABC.csv b/docs/examples/openai/ABC.csv deleted file mode 100644 index 3f01efd..0000000 --- a/docs/examples/openai/ABC.csv +++ /dev/null @@ -1,1260 +0,0 @@ -date,open,high,low,close,volume,Name -2013-02-08,46.52,46.895,46.46,46.89,1232802,ABC -2013-02-11,46.85,47.0,46.5,46.76,1115888,ABC -2013-02-12,46.7,47.05,46.6,46.96,1318773,ABC -2013-02-13,46.74,46.9,46.6,46.64,2645247,ABC -2013-02-14,46.67,46.99,46.6,46.77,1941879,ABC -2013-02-15,46.77,47.09,46.51,46.6,2962354,ABC -2013-02-19,46.67,47.24,46.62,47.22,1831692,ABC -2013-02-20,47.22,47.39,46.59,46.61,1970391,ABC -2013-02-21,46.48,46.62,46.1633,46.48,1432331,ABC -2013-02-22,46.48,47.03,46.385,46.95,1003693,ABC -2013-02-25,47.1,47.25,46.18,46.18,1795053,ABC -2013-02-26,46.38,46.62,46.08,46.57,1374357,ABC -2013-02-27,46.66,47.21,46.58,47.03,855645,ABC -2013-02-28,46.96,47.51,46.81,47.2,2254042,ABC -2013-03-01,47.22,48.14,46.89,47.98,2135840,ABC -2013-03-04,47.83,48.25,47.66,48.25,2132714,ABC -2013-03-05,48.04,48.4,47.96,48.27,1646704,ABC -2013-03-06,48.44,48.8,48.27,48.48,1494635,ABC -2013-03-07,48.44,48.65,47.68,48.08,1785841,ABC -2013-03-08,48.27,48.51,47.97,48.48,1427447,ABC -2013-03-11,48.36,48.74,48.29,48.55,1495051,ABC 
-2013-03-12,48.52,48.85,48.44,48.65,1222544,ABC -2013-03-13,48.71,48.9,48.57,48.86,859108,ABC -2013-03-14,49.0,49.39,48.91,49.29,1810900,ABC -2013-03-15,49.14,49.245,48.82,48.89,2027539,ABC -2013-03-18,48.32,48.67,48.16,48.3,2602318,ABC -2013-03-19,51.39,52.15,49.92,50.06,17071639,ABC -2013-03-20,50.31,50.45,49.72,50.24,3204753,ABC -2013-03-21,50.11,50.54,49.56,49.79,2284953,ABC -2013-03-22,49.8,49.919,49.6,49.82,2891124,ABC -2013-03-25,49.95,50.95,49.84,50.59,3248101,ABC -2013-03-26,50.86,51.21,50.47,50.72,2428292,ABC -2013-03-27,50.51,50.9,50.33,50.71,2027985,ABC -2013-03-28,50.96,51.57,50.795,51.45,2836398,ABC -2013-04-01,51.38,51.659,51.29,51.64,1429459,ABC -2013-04-02,51.92,53.36,51.85,52.41,4047740,ABC -2013-04-03,52.57,52.83,51.315,51.69,4838654,ABC -2013-04-04,51.69,52.83,51.67,51.82,2840059,ABC -2013-04-05,51.43,52.12,51.41,52.08,1596355,ABC -2013-04-08,52.18,52.7,51.69,52.63,2294537,ABC -2013-04-09,52.75,53.15,52.49,52.92,3118306,ABC -2013-04-10,53.93,54.0,53.075,53.3,3149508,ABC -2013-04-11,53.28,54.16,53.25,53.81,2354319,ABC -2013-04-12,53.78,54.33,53.68,54.18,1269258,ABC -2013-04-15,54.14,54.346,53.84,54.04,2243835,ABC -2013-04-16,54.48,54.9875,54.12,54.95,2050535,ABC -2013-04-17,54.78,55.19,54.45,55.06,3176640,ABC -2013-04-18,55.06,56.07,55.02,55.44,4273971,ABC -2013-04-19,55.61,55.99,55.24,55.92,2022474,ABC -2013-04-22,56.42,56.44,55.65,56.2,2571342,ABC -2013-04-23,56.39,56.91,55.94,56.3,2419484,ABC -2013-04-24,56.33,56.595,55.63,55.82,3091286,ABC -2013-04-25,55.82,56.0,53.125,53.41,7992643,ABC -2013-04-26,53.0,53.6,52.78,53.26,5365216,ABC -2013-04-29,53.33,54.28,53.25,54.06,3329041,ABC -2013-04-30,54.07,54.13,53.6,54.12,2065054,ABC -2013-05-01,54.06,54.4,53.76,53.84,2418458,ABC -2013-05-02,53.85,54.86,53.77,54.85,2157842,ABC -2013-05-03,55.01,55.08,54.42,54.57,1851171,ABC -2013-05-06,54.49,54.71,54.34,54.4,1401151,ABC -2013-05-07,54.59,55.04,54.37,54.98,2142219,ABC -2013-05-08,55.03,55.21,54.615,55.1,2598656,ABC 
-2013-05-09,54.91,55.09,54.49,54.81,1708138,ABC -2013-05-10,54.98,55.0,54.63,54.84,1324707,ABC -2013-05-13,54.68,54.88,54.45,54.48,2022901,ABC -2013-05-14,54.47,54.99,54.25,54.96,2371920,ABC -2013-05-15,55.0,55.48,54.645,55.01,2860546,ABC -2013-05-16,54.64,55.06,54.29,54.4,1766564,ABC -2013-05-17,54.44,55.07,54.18,54.99,2180967,ABC -2013-05-20,55.01,55.23,54.71,54.82,1712667,ABC -2013-05-21,54.77,54.94,54.4,54.65,1698392,ABC -2013-05-22,54.72,55.11,54.37,54.46,2471536,ABC -2013-05-23,54.32,54.8,54.05,54.58,1777800,ABC -2013-05-24,54.36,54.56,54.04,54.44,1935070,ABC -2013-05-28,54.77,55.2,54.045,54.37,2759204,ABC -2013-05-29,54.17,54.5,53.22,53.9,2679109,ABC -2013-05-30,54.0,55.02,53.9,54.72,1743711,ABC -2013-05-31,54.49,54.72,54.08,54.08,2535025,ABC -2013-06-03,54.01,54.14,52.38,52.68,3582039,ABC -2013-06-04,52.73,53.4,52.69,53.26,2779938,ABC -2013-06-05,53.04,53.365,52.84,52.9,3433007,ABC -2013-06-06,52.81,53.08,52.44,52.55,3421160,ABC -2013-06-07,52.7,53.25,52.7,52.94,3489279,ABC -2013-06-10,53.13,53.84,52.97,53.71,2499569,ABC -2013-06-11,53.96,55.18,53.94,54.81,3596473,ABC -2013-06-12,55.02,55.35,54.72,54.75,3673417,ABC -2013-06-13,54.61,54.69,54.04,54.56,2551371,ABC -2013-06-14,54.44,55.21,54.4,54.93,1985238,ABC -2013-06-17,55.13,55.68,55.05,55.66,2929391,ABC -2013-06-18,55.68,55.98,55.39,55.86,1806538,ABC -2013-06-19,55.78,56.39,55.39,55.56,2227143,ABC -2013-06-20,55.47,55.61,54.01,54.14,3015735,ABC -2013-06-21,54.24,54.93,54.15,54.34,2668410,ABC -2013-06-24,54.02,54.31,53.25,53.61,2936736,ABC -2013-06-25,53.74,53.77,53.06,53.55,2846521,ABC -2013-06-26,53.78,55.29,53.78,55.04,2503943,ABC -2013-06-27,55.37,55.76,55.1,55.31,2448161,ABC -2013-06-28,55.5,56.33,55.03,55.83,3351287,ABC -2013-07-01,56.11,56.23,54.97,55.01,2845774,ABC -2013-07-02,55.04,55.32,54.66,54.83,2008657,ABC -2013-07-03,54.5,55.235,54.4,55.09,1156975,ABC -2013-07-05,55.51,55.66,54.9,55.56,1630043,ABC -2013-07-08,55.83,56.3,55.59,56.28,1482479,ABC -2013-07-09,56.47,56.84,55.81,55.91,2100957,ABC 
-2013-07-10,55.95,56.6,55.91,56.55,1551751,ABC -2013-07-11,57.08,58.15,57.08,58.1,2337368,ABC -2013-07-12,58.02,58.42,57.68,58.39,1359201,ABC -2013-07-15,58.57,58.82,58.07,58.24,1693790,ABC -2013-07-16,58.33,58.61,58.14,58.28,2045875,ABC -2013-07-17,58.38,58.68,58.17,58.47,1138580,ABC -2013-07-18,58.58,58.95,58.04,58.31,2058792,ABC -2013-07-19,58.25,58.905,58.13,58.81,1552845,ABC -2013-07-22,58.74,59.1,58.6,58.99,1239347,ABC -2013-07-23,58.97,59.0,57.595,57.63,4015305,ABC -2013-07-24,58.32,59.3,56.575,56.77,3089498,ABC -2013-07-25,56.2,57.69,55.8,57.61,2869105,ABC -2013-07-26,57.41,58.58,57.28,58.47,1714280,ABC -2013-07-29,58.37,58.78,58.17,58.39,1440888,ABC -2013-07-30,58.84,58.89,58.09,58.23,2487475,ABC -2013-07-31,58.54,58.97,58.0,58.27,1848612,ABC -2013-08-01,58.39,59.115,58.37,58.8,1752703,ABC -2013-08-02,58.75,58.8,58.25,58.48,1560080,ABC -2013-08-05,58.38,59.095,58.29,59.03,1129256,ABC -2013-08-06,59.03,59.03,58.36,58.66,1060763,ABC -2013-08-07,58.31,59.17,58.21,58.74,1414342,ABC -2013-08-08,59.36,59.67,58.89,59.61,1578868,ABC -2013-08-09,59.46,59.75,59.24,59.49,1223240,ABC -2013-08-12,58.9,59.24,58.45,58.67,1544117,ABC -2013-08-13,58.7,58.97,57.98,58.14,2300108,ABC -2013-08-14,58.14,58.59,58.0,58.03,1784627,ABC -2013-08-15,57.71,57.78,56.79,57.27,2153188,ABC -2013-08-16,57.0,57.19,56.53,56.53,2227189,ABC -2013-08-19,56.54,57.095,56.5,56.85,2008063,ABC -2013-08-20,56.92,57.61,56.79,57.45,1888047,ABC -2013-08-21,57.26,57.54,56.94,57.07,1515690,ABC -2013-08-22,57.22,57.66,57.14,57.43,1602558,ABC -2013-08-23,57.41,57.63,57.1,57.45,1494975,ABC -2013-08-26,57.41,57.78,57.32,57.44,822346,ABC -2013-08-27,57.04,57.12,56.592,56.87,1372183,ABC -2013-08-28,56.82,57.135,56.24,56.96,1282693,ABC -2013-08-29,56.87,57.33,56.66,57.15,766779,ABC -2013-08-30,57.19,57.39,56.643,56.92,871662,ABC -2013-09-03,57.46,57.88,57.21,57.39,1054457,ABC -2013-09-04,57.63,58.3,57.592,58.13,981132,ABC -2013-09-05,58.0,58.33,57.88,58.15,879974,ABC 
-2013-09-06,58.2,58.625,57.47,58.24,885756,ABC -2013-09-09,58.31,58.79,58.15,58.66,941107,ABC -2013-09-10,59.0,59.5,58.89,59.37,1080266,ABC -2013-09-11,59.43,60.49,59.41,60.49,2026184,ABC -2013-09-12,60.42,61.02,60.37,60.46,1430892,ABC -2013-09-13,60.63,60.78,60.09,60.28,1274072,ABC -2013-09-16,60.8,61.26,60.66,60.95,1018103,ABC -2013-09-17,61.0,61.59,60.93,61.59,933056,ABC -2013-09-18,61.63,61.71,60.815,61.49,1137242,ABC -2013-09-19,61.85,62.44,61.78,62.23,1150681,ABC -2013-09-20,62.23,62.3,61.48,61.65,1631014,ABC -2013-09-23,61.68,61.83,61.281,61.57,813104,ABC -2013-09-24,61.73,61.9,60.84,60.91,1320506,ABC -2013-09-25,60.83,61.33,60.83,61.01,1044880,ABC -2013-09-26,60.97,61.44,60.69,61.16,869633,ABC -2013-09-27,60.91,61.35,60.73,61.28,688318,ABC -2013-09-30,60.9,61.27,60.69,61.1,1040175,ABC -2013-10-01,61.18,62.5,61.1,62.48,1117749,ABC -2013-10-02,62.18,62.3,61.58,62.2,1186699,ABC -2013-10-03,62.06,62.34,61.18,61.42,2007948,ABC -2013-10-04,61.59,62.54,61.415,62.47,1327444,ABC -2013-10-07,62.27,62.36,61.88,61.97,742524,ABC -2013-10-08,61.98,62.445,61.07,61.19,1782527,ABC -2013-10-09,61.29,61.475,60.78,61.27,1638446,ABC -2013-10-10,61.8,62.48,61.64,62.43,1209144,ABC -2013-10-11,62.48,62.8791,62.38,62.65,1234458,ABC -2013-10-14,62.46,63.14,62.18,63.11,1068126,ABC -2013-10-15,63.05,63.28,62.6303,62.81,855621,ABC -2013-10-16,63.03,63.76,62.94,63.75,1016124,ABC -2013-10-17,63.77,64.8,63.65,64.62,1674889,ABC -2013-10-18,64.98,65.06,64.33,64.61,1112038,ABC -2013-10-21,64.57,64.64,63.62,63.87,1819767,ABC -2013-10-22,64.18,64.5599,63.53,63.76,1581978,ABC -2013-10-23,63.63,64.0,63.24,63.83,1229867,ABC -2013-10-24,64.2,64.94,64.2,64.64,1090638,ABC -2013-10-25,64.84,65.31,64.55,65.24,1073977,ABC -2013-10-28,65.16,65.38,64.525,65.2,1429321,ABC -2013-10-29,65.38,65.8,65.105,65.8,1334070,ABC -2013-10-30,65.95,65.96,64.87,65.22,1677780,ABC -2013-10-31,65.05,65.98,64.62,65.33,1838205,ABC -2013-11-01,65.55,66.38,65.16,66.38,2300910,ABC -2013-11-04,66.5,67.1,66.25,67.09,2724446,ABC 
-2013-11-05,67.09,67.79,66.795,66.87,2041563,ABC -2013-11-06,67.17,67.4,66.56,67.16,1410545,ABC -2013-11-07,67.41,67.48,66.59,66.73,1523597,ABC -2013-11-08,66.92,68.02,66.75,68.0,1504484,ABC -2013-11-11,68.08,68.35,67.66,68.11,1020369,ABC -2013-11-12,68.18,68.44,67.82,68.29,1347315,ABC -2013-11-13,67.9,68.49,67.72,68.49,1245785,ABC -2013-11-14,68.34,69.36,68.261,69.22,1378096,ABC -2013-11-15,69.12,69.6135,68.8997,69.5,1407972,ABC -2013-11-18,69.99,70.0,68.94,69.15,1464287,ABC -2013-11-19,69.01,69.49,68.67,68.87,1513335,ABC -2013-11-20,68.88,69.62,68.8,69.34,1294326,ABC -2013-11-21,69.3,70.04,69.28,69.79,1137123,ABC -2013-11-22,69.81,70.19,69.79,70.02,1310264,ABC -2013-11-25,69.8,70.75,69.8,70.42,1077743,ABC -2013-11-26,70.77,71.16,70.29,70.61,1528461,ABC -2013-11-27,70.59,70.91,70.16,70.5,862630,ABC -2013-11-29,70.7,70.88,70.39,70.53,396085,ABC -2013-12-02,70.79,71.38,70.63,70.89,1091839,ABC -2013-12-03,70.54,70.724,70.0303,70.57,1787944,ABC -2013-12-04,70.35,70.82,68.78,69.46,2574896,ABC -2013-12-05,69.24,69.61,68.88,69.14,2130270,ABC -2013-12-06,69.88,70.36,69.6,70.29,1299096,ABC -2013-12-09,70.47,71.21,70.25,70.63,1460386,ABC -2013-12-10,70.11,70.14,68.75,69.77,3159702,ABC -2013-12-11,69.67,69.98,68.74,68.8,1978217,ABC -2013-12-12,69.2,69.56,68.69,68.77,2192591,ABC -2013-12-13,68.81,68.96,67.74,68.45,1849354,ABC -2013-12-16,68.84,69.14,68.14,68.45,1753762,ABC -2013-12-17,68.72,68.76,68.04,68.35,1702097,ABC -2013-12-18,68.6,69.67,68.16,69.61,1727133,ABC -2013-12-19,69.37,69.75,69.17,69.63,1964894,ABC -2013-12-20,69.61,70.2399,69.5601,69.82,2221099,ABC -2013-12-23,70.13,70.27,69.7674,70.03,1124655,ABC -2013-12-24,70.01,70.42,69.94,70.14,417914,ABC -2013-12-26,70.25,70.63,70.01,70.3,640499,ABC -2013-12-27,70.45,70.45,69.825,70.21,897665,ABC -2013-12-30,70.25,70.52,70.01,70.38,642199,ABC -2013-12-31,70.52,70.6401,70.09,70.31,627061,ABC -2014-01-02,70.11,70.23,69.48,69.89,1148391,ABC -2014-01-03,69.89,70.21,69.81,69.94,1101319,ABC 
-2014-01-06,70.34,70.54,69.35,69.69,1301393,ABC -2014-01-07,69.82,70.92,69.82,70.45,1531604,ABC -2014-01-08,70.33,71.412,70.28,71.14,2084010,ABC -2014-01-09,71.45,71.92,71.1,71.38,1610010,ABC -2014-01-10,71.49,71.55,71.0401,71.34,1582552,ABC -2014-01-13,71.42,71.75,70.38,70.61,1806518,ABC -2014-01-14,70.66,70.96,69.61,70.54,2427311,ABC -2014-01-15,70.74,70.74,70.03,70.55,1251473,ABC -2014-01-16,70.6,70.68,70.26,70.5,1082158,ABC -2014-01-17,70.68,70.94,70.32,70.53,1545181,ABC -2014-01-21,70.88,71.17,70.27,70.52,1368005,ABC -2014-01-22,70.8,70.87,69.8,70.28,1845447,ABC -2014-01-23,70.0,70.88,69.22,69.64,2110899,ABC -2014-01-24,69.27,69.83,67.25,67.25,3487005,ABC -2014-01-27,67.04,67.51,66.641,66.81,2836226,ABC -2014-01-28,66.74,67.89,66.69,67.72,1897287,ABC -2014-01-29,66.67,67.92,66.67,67.26,1906893,ABC -2014-01-30,67.65,68.145,67.65,67.77,1437156,ABC -2014-01-31,66.6,67.49,66.13,67.22,3733492,ABC -2014-02-03,67.13,67.13,65.1,65.17,3542201,ABC -2014-02-04,65.38,65.68,64.86,65.01,1864987,ABC -2014-02-05,64.82,65.12,64.28,64.72,2421358,ABC -2014-02-06,64.86,65.61,64.4,65.51,1553872,ABC -2014-02-07,65.88,66.59,65.7,66.54,1563424,ABC -2014-02-10,66.46,66.48,65.72,66.37,2913611,ABC -2014-02-11,66.23,67.89,66.19,67.56,2487869,ABC -2014-02-12,67.81,68.82,67.58,67.74,1965422,ABC -2014-02-13,67.37,68.19,66.88,67.89,1286198,ABC -2014-02-14,67.73,68.355,67.6,68.0,1117831,ABC -2014-02-18,68.06,68.96,67.99,68.5,1544778,ABC -2014-02-19,68.12,69.02,67.98,68.11,1937429,ABC -2014-02-20,68.4,68.65,67.57,68.39,1469772,ABC -2014-02-21,68.42,69.15,68.16,68.4,1113125,ABC -2014-02-24,68.31,69.22,68.31,68.58,1363080,ABC -2014-02-25,68.39,68.99,68.24,68.72,1497215,ABC -2014-02-26,68.87,69.165,67.96,68.26,1539103,ABC -2014-02-27,68.01,68.31,67.65,68.01,1401629,ABC -2014-02-28,68.17,68.38,67.475,67.85,1681819,ABC -2014-03-03,67.11,67.69,66.965,67.5,1389799,ABC -2014-03-04,68.03,68.94,67.97,68.76,2309079,ABC -2014-03-05,69.0,69.15,68.7,68.89,1427189,ABC 
-2014-03-06,69.0,69.45,68.75,68.75,1495323,ABC -2014-03-07,69.25,69.27,68.56,68.94,1783383,ABC -2014-03-10,68.91,69.65,68.75,69.1,1232134,ABC -2014-03-11,69.11,69.47,68.171,68.34,1562662,ABC -2014-03-12,67.76,67.85,66.97,67.68,1944473,ABC -2014-03-13,68.01,68.21,67.55,67.79,1897421,ABC -2014-03-14,67.66,67.95,67.4032,67.57,1672775,ABC -2014-03-17,67.73,68.18,67.64,68.08,1444639,ABC -2014-03-18,68.08,68.43,67.775,67.85,1604609,ABC -2014-03-19,68.05,68.1,66.59,66.87,1337650,ABC -2014-03-20,66.74,66.9,66.26,66.48,1526286,ABC -2014-03-21,66.55,66.63,64.56,64.82,3534843,ABC -2014-03-24,65.29,65.4899,63.5299,64.32,2792905,ABC -2014-03-25,64.61,65.31,63.98,64.56,1948609,ABC -2014-03-26,64.92,65.59,64.7799,64.85,2186674,ABC -2014-03-27,64.81,64.82,63.72,64.11,2193227,ABC -2014-03-28,64.25,65.03,64.2,64.82,1672045,ABC -2014-03-31,65.3,65.64,65.06,65.59,1479290,ABC -2014-04-01,65.6,65.92,65.32,65.91,1747343,ABC -2014-04-02,65.75,66.37,65.55,65.58,1836208,ABC -2014-04-03,65.81,65.91,65.38,65.8,2173432,ABC -2014-04-04,65.99,66.34,64.85,65.05,1965787,ABC -2014-04-07,64.94,65.08,63.61,64.18,3037875,ABC -2014-04-08,64.14,64.48,63.4,64.17,1907337,ABC -2014-04-09,64.77,65.71,64.64,65.64,2409056,ABC -2014-04-10,65.91,66.18,64.74,65.0,2105725,ABC -2014-04-11,64.53,65.25,63.86,64.17,2587358,ABC -2014-04-14,64.44,64.68,63.85,64.51,1432207,ABC -2014-04-15,64.56,64.91,63.23,64.22,2011115,ABC -2014-04-16,64.87,65.14,64.3,64.95,1392865,ABC -2014-04-17,64.75,65.09,64.48,64.68,1049588,ABC -2014-04-21,64.21,65.15,64.21,64.8,1224743,ABC -2014-04-22,64.8,65.49,64.66,64.68,1602811,ABC -2014-04-23,64.8,65.19,64.5,64.84,1399772,ABC -2014-04-24,66.0,66.81,63.76,64.89,3612731,ABC -2014-04-25,64.53,64.79,62.55,62.83,2935598,ABC -2014-04-28,63.25,63.68,62.69,63.45,1666982,ABC -2014-04-29,63.7,64.3,63.558,63.99,1758493,ABC -2014-04-30,63.75,65.29,63.59,65.18,3099892,ABC -2014-05-01,64.76,65.17,63.85,64.87,2151107,ABC -2014-05-02,64.89,64.89,64.15,64.27,1796969,ABC 
-2014-05-05,64.0,65.3,63.88,65.1,1767273,ABC -2014-05-06,64.71,65.0,64.57,64.77,1445797,ABC -2014-05-07,64.87,64.96,64.26,64.54,1946748,ABC -2014-05-08,64.39,65.255,64.195,64.4,1586471,ABC -2014-05-09,64.52,65.75,64.02,65.58,2715846,ABC -2014-05-12,65.98,66.72,65.87,66.67,1719577,ABC -2014-05-13,66.55,67.0,66.05,66.85,2622451,ABC -2014-05-14,66.86,67.42,66.7,67.3,1976824,ABC -2014-05-15,66.89,67.51,66.69,67.5,1934871,ABC -2014-05-16,67.36,68.34,67.31,68.31,2558250,ABC -2014-05-19,68.53,69.5,68.31,69.42,3277747,ABC -2014-05-20,69.41,69.88,68.79,69.79,1907659,ABC -2014-05-21,69.79,70.81,69.79,70.71,2327406,ABC -2014-05-22,70.78,71.66,70.58,71.35,1772193,ABC -2014-05-23,71.55,71.71,71.1518,71.64,1155062,ABC -2014-05-27,71.76,72.41,71.71,72.4,1521070,ABC -2014-05-28,72.36,72.84,72.09,72.68,2384825,ABC -2014-05-29,72.69,73.42,72.69,73.28,1611723,ABC -2014-05-30,73.34,73.57,72.72,73.18,3909596,ABC -2014-06-02,73.31,73.79,73.08,73.21,1602057,ABC -2014-06-03,72.95,73.2,72.67,73.09,1717742,ABC -2014-06-04,73.11,73.67,72.85,73.66,1734230,ABC -2014-06-05,73.51,73.81,72.53,72.79,2691569,ABC -2014-06-06,72.96,72.96,72.05,72.5,2388418,ABC -2014-06-09,72.4,72.84,72.11,72.34,1581366,ABC -2014-06-10,72.17,72.57,71.68,72.01,1796736,ABC -2014-06-11,71.88,72.06,71.47,71.92,1323978,ABC -2014-06-12,71.79,72.05,71.1,71.46,1509338,ABC -2014-06-13,71.45,71.9,71.0,71.78,1269685,ABC -2014-06-16,71.61,72.1,71.23,71.52,1259561,ABC -2014-06-17,71.37,71.93,71.18,71.74,951616,ABC -2014-06-18,71.65,72.24,71.18,72.1,1052417,ABC -2014-06-19,72.18,72.42,71.64,71.93,1256916,ABC -2014-06-20,72.17,72.46,71.91,72.26,2560137,ABC -2014-06-23,72.47,72.77,72.27,72.48,969644,ABC -2014-06-24,72.18,72.74,71.91,72.55,1413799,ABC -2014-06-25,72.58,73.11,72.4,72.74,1029354,ABC -2014-06-26,72.74,72.85,72.39,72.72,931745,ABC -2014-06-27,72.59,73.25,72.43,72.87,1121183,ABC -2014-06-30,72.99,72.99,72.4,72.66,1105872,ABC -2014-07-01,72.67,73.26,72.59,72.98,1096350,ABC -2014-07-02,72.83,73.18,72.72,73.05,1272892,ABC 
-2014-07-03,73.33,73.42,73.05,73.23,943930,ABC -2014-07-07,73.2,73.23,72.6601,73.01,892266,ABC -2014-07-08,73.0,73.07,72.6,72.87,1273330,ABC -2014-07-09,73.1,73.26,72.67,72.98,729488,ABC -2014-07-10,72.54,73.14,72.011,72.92,764149,ABC -2014-07-11,72.94,73.1,72.43,72.98,527146,ABC -2014-07-14,73.32,73.5,72.85,73.1,664817,ABC -2014-07-15,72.8,73.09,72.45,72.7,1672248,ABC -2014-07-16,72.99,72.99,72.4,72.97,1446615,ABC -2014-07-17,72.78,73.55,72.49,72.7,1274324,ABC -2014-07-18,72.85,73.3,72.56,73.06,1431481,ABC -2014-07-21,72.92,73.0,72.38,72.74,1055071,ABC -2014-07-22,73.06,73.88,73.02,73.72,1257019,ABC -2014-07-23,73.74,74.28,73.62,74.06,1931615,ABC -2014-07-24,77.05,79.13,75.62,76.37,4459397,ABC -2014-07-25,76.41,77.06,76.21,76.79,2049734,ABC -2014-07-28,76.75,77.72,76.565,77.4,3131747,ABC -2014-07-29,77.39,77.99,77.25,77.62,1533998,ABC -2014-07-30,77.99,78.46,77.693,77.95,1270841,ABC -2014-07-31,77.67,78.04,76.72,76.91,2099234,ABC -2014-08-01,77.28,77.92,76.54,77.79,3296305,ABC -2014-08-04,77.28,77.92,76.54,77.79,3296305,ABC -2014-08-05,77.27,77.78,76.84,77.05,1668612,ABC -2014-08-06,76.67,76.87,75.4901,76.03,2228307,ABC -2014-08-07,76.37,76.4577,74.86,74.96,2009757,ABC -2014-08-08,75.0,75.57,74.66,75.43,1353122,ABC -2014-08-11,75.77,76.39,75.53,75.89,1267074,ABC -2014-08-12,75.94,76.12,75.6,75.82,1084400,ABC -2014-08-13,76.17,76.49,75.94,76.14,1170230,ABC -2014-08-14,76.06,76.64,76.044,76.62,1044199,ABC -2014-08-15,76.79,77.0,76.1,76.33,1594725,ABC -2014-08-18,76.53,76.99,76.36,76.6,698416,ABC -2014-08-19,76.68,77.1,76.63,76.93,715761,ABC -2014-08-20,76.93,77.02,76.58,76.86,718598,ABC -2014-08-21,77.06,77.37,76.88,76.93,758924,ABC -2014-08-22,77.05,77.285,76.43,76.6,681067,ABC -2014-08-25,77.03,77.29,76.67,76.92,454559,ABC -2014-08-26,76.85,76.91,76.35,76.72,615982,ABC -2014-08-27,76.97,76.97,76.001,76.37,774424,ABC -2014-08-28,76.31,76.92,76.09,76.72,580792,ABC -2014-08-29,77.0,77.41,76.8,77.39,992147,ABC -2014-09-02,77.51,77.69,77.05,77.15,1112317,ABC 
-2014-09-03,77.51,77.65,77.19,77.36,665290,ABC -2014-09-04,77.32,77.94,77.08,77.35,840115,ABC -2014-09-05,77.18,78.26,76.98,78.14,1240679,ABC -2014-09-08,77.92,78.5992,77.82,78.33,1086239,ABC -2014-09-09,78.22,78.33,77.83,78.0,1084259,ABC -2014-09-10,78.15,78.34,77.82,77.96,913905,ABC -2014-09-11,77.52,77.725,76.73,77.35,1787043,ABC -2014-09-12,77.35,77.6,76.61,77.1,1329951,ABC -2014-09-15,76.99,77.1025,76.55,76.84,994352,ABC -2014-09-16,76.47,77.44,75.71,77.11,1988534,ABC -2014-09-17,77.25,77.29,76.19,76.53,1691112,ABC -2014-09-18,76.81,77.23,76.7,77.13,1121072,ABC -2014-09-19,77.47,78.12,77.32,77.33,3064569,ABC -2014-09-22,77.35,77.56,76.75,77.18,1283503,ABC -2014-09-23,76.72,77.15,76.43,76.79,1196226,ABC -2014-09-24,76.92,78.12,76.682,78.04,1749773,ABC -2014-09-25,77.71,78.02,77.09,77.49,1738180,ABC -2014-09-26,77.41,77.67,76.86,77.42,910138,ABC -2014-09-29,77.05,77.84,76.95,77.63,973068,ABC -2014-09-30,77.65,78.02,77.19,77.3,1373497,ABC -2014-10-01,78.21,78.21,76.44,76.69,1491693,ABC -2014-10-02,76.61,77.25,76.365,76.87,1470542,ABC -2014-10-03,77.11,78.21,77.11,77.89,1578682,ABC -2014-10-06,78.07,78.58,77.02,77.52,1328932,ABC -2014-10-07,77.2,77.24,76.48,76.5,1070363,ABC -2014-10-08,76.37,77.98,76.33,77.89,1419595,ABC -2014-10-09,77.89,78.46,77.14,77.2,1498999,ABC -2014-10-10,77.35,78.11,76.86,76.9,1407495,ABC -2014-10-13,76.94,77.19,75.75,75.84,1534841,ABC -2014-10-14,76.25,77.34,75.66,76.63,1645429,ABC -2014-10-15,75.79,76.41,74.51,76.02,2385579,ABC -2014-10-16,75.38,75.8699,74.32,75.02,2415304,ABC -2014-10-17,75.47,76.18,75.32,75.84,1590322,ABC -2014-10-20,75.87,76.02,75.11,75.49,2599298,ABC -2014-10-21,76.2,76.86,76.02,76.62,3045483,ABC -2014-10-22,76.9,77.2,76.59,76.63,1946443,ABC -2014-10-23,77.32,77.9,77.18,77.44,1979067,ABC -2014-10-24,77.71,78.33,77.43,78.28,1425460,ABC -2014-10-27,78.34,79.99,78.29,79.45,3261053,ABC -2014-10-28,79.98,79.99,78.935,79.65,2253399,ABC -2014-10-29,79.58,80.19,79.26,79.74,2529790,ABC 
-2014-10-30,87.95,88.78,87.54,88.71,1323449,ABC -2014-10-31,85.06,85.99,84.77,85.41,3248353,ABC -2014-11-03,85.81,86.96,85.58,86.9,2569215,ABC -2014-11-04,86.63,88.215,86.495,88.19,2796558,ABC -2014-11-05,88.83,89.23,87.61,87.81,2050282,ABC -2014-11-06,87.96,88.11,86.82,87.4,2317247,ABC -2014-11-07,87.8,87.8,86.485,86.73,2199257,ABC -2014-11-10,87.02,87.863,86.52,87.71,1297153,ABC -2014-11-11,87.71,88.44,87.64,87.93,1214458,ABC -2014-11-12,87.95,88.78,87.54,88.71,1323449,ABC -2014-11-13,88.54,89.48,88.54,89.08,1563532,ABC -2014-11-14,88.87,89.24,88.34,88.83,1635048,ABC -2014-11-17,88.83,89.7,88.57,89.01,1624510,ABC -2014-11-18,89.0,89.97,88.67,89.76,1611233,ABC -2014-11-19,89.44,89.9399,88.89,89.32,2207291,ABC -2014-11-20,89.19,89.305,88.34,88.9,1411695,ABC -2014-11-21,89.0,89.03,88.1,88.74,1635722,ABC -2014-11-24,89.21,90.1,89.21,90.04,1309498,ABC -2014-11-25,90.04,90.45,89.51,90.32,1205405,ABC -2014-11-26,90.22,90.7999,89.5,90.09,974913,ABC -2014-11-28,90.49,91.59,90.46,91.05,758326,ABC -2014-12-01,90.64,91.4,90.28,90.84,1586399,ABC -2014-12-02,90.93,92.51,90.87,92.04,1769841,ABC -2014-12-03,91.99,92.39,91.715,91.99,1533075,ABC -2014-12-04,91.87,92.645,91.68,91.95,1444872,ABC -2014-12-05,92.38,92.56,91.86,92.21,740071,ABC -2014-12-08,92.01,92.84,91.97,92.56,1507429,ABC -2014-12-09,91.45,92.25,90.77,92.09,1409215,ABC -2014-12-10,91.92,92.28,90.81,90.92,1244290,ABC -2014-12-11,91.93,92.84,91.52,92.55,1767694,ABC -2014-12-12,91.88,92.53,90.92,90.98,1691484,ABC -2014-12-15,91.55,91.72,89.98,90.69,2359807,ABC -2014-12-16,90.19,91.15,89.255,89.43,2243390,ABC -2014-12-17,89.82,91.02,89.485,90.56,1958540,ABC -2014-12-18,91.12,91.39,90.43,91.28,2160743,ABC -2014-12-19,92.04,92.56,91.12,91.68,2886939,ABC -2014-12-22,92.26,92.56,91.48,91.94,1576047,ABC -2014-12-23,92.4,92.65,90.69,91.42,1175836,ABC -2014-12-24,91.65,92.26,91.46,91.6,436029,ABC -2014-12-26,91.54,92.21,91.3,91.51,393279,ABC -2014-12-29,91.26,91.89,91.18,91.63,752672,ABC 
-2014-12-30,91.47,91.67,91.08,91.28,1088552,ABC -2014-12-31,91.57,91.93,90.09,90.16,978958,ABC -2015-01-02,90.61,91.32,89.82,90.46,1124780,ABC -2015-01-05,90.07,90.74,89.64,89.69,1631322,ABC -2015-01-06,90.14,91.42,89.52,90.18,1761559,ABC -2015-01-07,90.98,92.4,90.43,91.98,1608941,ABC -2015-01-08,91.74,92.77,91.16,92.19,2399882,ABC -2015-01-09,92.44,93.27,92.32,93.0,1553653,ABC -2015-01-12,93.77,94.35,90.55,90.93,3524557,ABC -2015-01-13,91.87,93.42,90.933,91.64,3463946,ABC -2015-01-14,91.42,92.87,90.59,92.1,1999984,ABC -2015-01-15,92.15,92.4,91.1,91.4,1362570,ABC -2015-01-16,91.14,92.46,90.35,92.42,1541666,ABC -2015-01-20,93.01,93.42,91.39,92.44,1179964,ABC -2015-01-21,92.38,93.0,91.8995,92.78,1531623,ABC -2015-01-22,93.23,94.225,91.95,94.15,1641097,ABC -2015-01-23,94.38,95.16,94.3,94.73,1854808,ABC -2015-01-26,94.85,95.87,94.3,95.85,2097471,ABC -2015-01-27,95.17,96.49,94.705,95.92,2734029,ABC -2015-01-28,98.8,100.2,95.26,96.37,5425090,ABC -2015-01-29,96.87,97.13,94.89,96.34,3327407,ABC -2015-01-30,96.18,96.5,94.98,95.05,2346798,ABC -2015-02-02,95.57,95.77,93.6,95.15,1697600,ABC -2015-02-03,95.67,95.83,94.27,95.64,2081342,ABC -2015-02-04,95.62,96.454,95.2315,95.87,1777618,ABC -2015-02-05,96.27,96.67,95.98,96.34,1055712,ABC -2015-02-06,96.95,98.23,96.59,97.04,2015514,ABC -2015-02-09,96.97,96.97,95.075,95.56,1645220,ABC -2015-02-10,96.34,97.01,95.77,96.91,2248000,ABC -2015-02-11,97.01,99.08,96.93,98.74,3137776,ABC -2015-02-12,98.83,99.85,98.23,99.83,2224613,ABC -2015-02-13,99.68,100.36,99.0117,100.34,2064418,ABC -2015-02-17,100.21,100.85,99.68,100.6,2196913,ABC -2015-02-18,100.21,100.92,99.61,100.66,1680401,ABC -2015-02-19,100.52,101.03,100.09,101.01,1421490,ABC -2015-02-20,101.02,102.0,100.18,101.98,1442607,ABC -2015-02-23,101.98,102.89,101.79,102.76,1107836,ABC -2015-02-24,102.76,103.14,102.29,102.97,1169184,ABC -2015-02-25,102.97,103.705,102.75,103.29,1064949,ABC -2015-02-26,102.93,103.64,102.62,102.98,1711928,ABC 
-2015-02-27,103.08,103.31,102.49,102.76,1329252,ABC -2015-03-02,102.57,103.25,102.44,103.1,1948322,ABC -2015-03-03,103.41,103.97,102.5,103.04,1995403,ABC -2015-03-04,102.53,103.65,102.13,103.48,1832610,ABC -2015-03-05,103.74,103.9199,103.17,103.64,1208409,ABC -2015-03-06,103.19,103.59,102.0,102.26,1549308,ABC -2015-03-09,102.47,103.58,102.01,103.34,1295312,ABC -2015-03-10,102.62,103.52,102.22,102.93,1912149,ABC -2015-03-11,103.4,104.1,102.95,103.84,1719937,ABC -2015-03-12,104.0,105.515,103.95,105.38,1448166,ABC -2015-03-13,105.26,106.38,105.12,106.15,1746949,ABC -2015-03-16,108.13,110.2,106.971,110.13,3548820,ABC -2015-03-17,109.94,110.71,109.27,110.36,1787818,ABC -2015-03-18,110.45,112.28,109.82,111.7,2043999,ABC -2015-03-19,111.88,112.5,111.07,112.37,1422235,ABC -2015-03-20,112.98,114.5,112.84,113.46,4380904,ABC -2015-03-23,113.74,114.53,113.13,113.64,1921339,ABC -2015-03-24,113.64,114.73,113.36,113.89,1913505,ABC -2015-03-25,113.9,114.6,111.8,112.14,2066891,ABC -2015-03-26,111.5,113.05,110.54,112.09,2411776,ABC -2015-03-27,112.33,113.58,112.2128,113.19,1881946,ABC -2015-03-30,113.7,114.0,113.37,113.49,2180388,ABC -2015-03-31,113.42,114.27,113.42,113.67,2254190,ABC -2015-04-01,113.93,113.93,111.41,112.03,2171922,ABC -2015-04-02,112.16,112.59,111.58,112.39,2049432,ABC -2015-04-06,111.99,113.13,111.79,112.54,1869191,ABC -2015-04-07,112.91,113.49,112.45,112.68,1189853,ABC -2015-04-08,112.77,113.5,112.43,113.24,2048854,ABC -2015-04-09,113.31,114.28,112.64,113.5,2425708,ABC -2015-04-10,113.4,115.55,113.2244,115.35,1879608,ABC -2015-04-13,115.31,115.8799,113.66,113.99,1826936,ABC -2015-04-14,114.34,114.45,112.7,114.0,1032017,ABC -2015-04-15,114.2,114.33,112.78,113.62,1327992,ABC -2015-04-16,113.41,114.06,112.525,113.83,1055597,ABC -2015-04-17,113.21,113.71,112.05,112.61,2297916,ABC -2015-04-20,112.96,113.56,112.89,113.19,1412978,ABC -2015-04-21,113.89,114.49,113.24,113.89,1176767,ABC -2015-04-22,113.85,114.41,113.37,113.81,1145338,ABC 
-2015-04-23,113.81,116.18,113.57,115.2,2152201,ABC -2015-04-24,115.1,115.98,114.19,115.48,850348,ABC -2015-04-27,115.88,115.88,113.854,114.11,1563069,ABC -2015-04-28,114.0,114.62,112.61,113.96,1339922,ABC -2015-04-29,113.69,113.99,112.12,112.44,2201106,ABC -2015-04-30,117.39,120.68,113.03,114.3,5198131,ABC -2015-05-01,115.12,115.32,113.83,114.61,2903843,ABC -2015-05-04,114.67,116.28,114.51,115.25,2195218,ABC -2015-05-05,115.62,115.75,114.4,114.7,1843962,ABC -2015-05-06,114.9,115.13,113.507,114.06,1703618,ABC -2015-05-07,114.28,115.07,113.79,114.07,1707853,ABC -2015-05-08,114.54,116.0,114.53,115.3,2486790,ABC -2015-05-11,115.14,115.81,114.75,114.91,1592279,ABC -2015-05-12,114.5,115.4497,114.265,115.05,1988941,ABC -2015-05-13,115.47,115.85,114.17,114.3,1533445,ABC -2015-05-14,114.4,114.84,114.0,114.66,2416767,ABC -2015-05-15,114.86,115.07,113.75,114.18,1947754,ABC -2015-05-18,113.36,113.62,112.56,113.31,2804059,ABC -2015-05-19,113.54,115.58,113.42,115.11,1902393,ABC -2015-05-20,115.1,115.51,114.7,114.71,1885274,ABC -2015-05-21,114.5,114.81,113.87,113.94,2319280,ABC -2015-05-22,113.76,114.335,113.53,113.8,1498427,ABC -2015-05-26,113.88,114.45,112.805,113.0,1559989,ABC -2015-05-27,112.91,113.82,112.77,113.59,1250265,ABC -2015-05-28,113.15,114.06,112.93,113.1,1360766,ABC -2015-05-29,113.22,114.4,112.56,112.56,4702561,ABC -2015-06-01,112.81,113.76,111.83,113.26,1360577,ABC -2015-06-02,112.55,113.34,112.01,112.48,1403071,ABC -2015-06-03,112.35,113.16,112.19,112.38,1452942,ABC -2015-06-04,111.47,112.6,111.1,111.49,1562458,ABC -2015-06-05,111.21,111.65,110.25,111.5,1382007,ABC -2015-06-08,111.31,111.81,110.21,110.22,1335504,ABC -2015-06-09,110.74,110.74,108.61,109.12,1917939,ABC -2015-06-10,109.04,109.185,108.28,108.75,1980546,ABC -2015-06-11,108.92,111.1396,108.8,110.41,1906301,ABC -2015-06-12,111.0,111.0,109.81,110.15,1163105,ABC -2015-06-15,109.77,112.33,109.0,110.63,2277562,ABC -2015-06-16,110.68,112.09,110.17,111.18,1671152,ABC 
-2015-06-17,111.38,112.13,110.87,111.51,1597613,ABC -2015-06-18,111.5,112.63,110.72,111.92,1329971,ABC -2015-06-19,111.72,112.31,111.3375,111.47,4169750,ABC -2015-06-22,112.58,113.19,111.65,111.79,902844,ABC -2015-06-23,112.18,112.47,111.58,111.81,1035572,ABC -2015-06-24,111.5,111.6,109.57,109.6,1558230,ABC -2015-06-25,109.49,109.7,107.3101,107.61,3239384,ABC -2015-06-26,108.07,108.56,107.32,107.84,1622856,ABC -2015-06-29,106.81,107.62,105.78,106.1,1730588,ABC -2015-06-30,106.3,107.31,106.15,106.34,1958595,ABC -2015-07-01,107.17,108.07,106.87,107.71,1897518,ABC -2015-07-02,107.71,108.32,107.03,107.1,1269020,ABC -2015-07-06,106.47,108.57,106.0,108.03,1634585,ABC -2015-07-07,108.74,108.79,107.01,108.63,1298465,ABC -2015-07-08,107.54,108.56,107.39,107.83,1174182,ABC -2015-07-09,108.97,109.15,108.08,108.86,2158668,ABC -2015-07-10,109.74,109.98,109.27,109.46,1170511,ABC -2015-07-13,110.52,110.65,109.36,110.07,1010223,ABC -2015-07-14,109.99,110.69,109.23,110.4,1505721,ABC -2015-07-15,110.67,110.93,109.496,110.53,947175,ABC -2015-07-16,111.0,111.93,109.93,111.88,1736320,ABC -2015-07-17,111.85,113.66,111.1,113.2,1932064,ABC -2015-07-20,113.23,115.0,113.18,114.76,1976118,ABC -2015-07-21,114.65,114.94,113.5,114.1,1466030,ABC -2015-07-22,114.01,115.41,113.8706,114.95,2397182,ABC -2015-07-23,110.62,114.02,109.52,110.12,4172044,ABC -2015-07-24,109.77,110.144,108.45,108.73,2145338,ABC -2015-07-27,108.13,108.2917,106.8,107.37,2689998,ABC -2015-07-28,107.8,108.22,106.77,108.0,1796630,ABC -2015-07-29,107.79,109.08,107.07,107.78,2402743,ABC -2015-07-30,107.49,107.49,104.31,104.47,3974168,ABC -2015-07-31,104.58,106.25,104.1333,105.75,2718811,ABC -2015-08-03,105.75,106.65,105.14,105.82,1741467,ABC -2015-08-04,106.16,106.96,105.77,106.21,2189909,ABC -2015-08-05,106.65,107.5,106.42,107.25,1667823,ABC -2015-08-06,107.37,107.85,104.56,104.75,1776170,ABC -2015-08-07,104.51,104.65,102.88,104.36,2696148,ABC -2015-08-10,105.04,106.26,104.45,104.65,1423005,ABC 
-2015-08-11,104.24,105.0,103.475,104.51,1647051,ABC -2015-08-12,104.19,105.84,102.45,104.38,1905030,ABC -2015-08-13,104.73,105.294,102.291,103.88,1666982,ABC -2015-08-14,103.98,104.19,103.41,103.94,1040572,ABC -2015-08-17,103.75,104.68,102.79,104.62,987897,ABC -2015-08-18,104.36,105.87,104.36,104.95,1091105,ABC -2015-08-19,104.52,107.02,103.85,106.14,2161891,ABC -2015-08-20,105.1,105.7,103.34,103.37,1792888,ABC -2015-08-21,102.48,103.49,101.34,101.8,2746232,ABC -2015-08-24,93.0,100.85,91.48,98.53,4569825,ABC -2015-08-25,100.62,100.88,97.32,97.41,2699862,ABC -2015-08-26,99.39,100.86,97.05,100.61,2741311,ABC -2015-08-27,101.47,101.785,99.4,101.15,4173816,ABC -2015-08-28,101.34,101.49,100.31,100.87,1693468,ABC -2015-08-31,100.87,101.84,95.61,100.04,1496300,ABC -2015-09-01,97.55,100.86,97.55,99.27,2533578,ABC -2015-09-02,100.51,101.77,99.1,100.28,1695578,ABC -2015-09-03,100.9,101.49,99.62,100.0,1580082,ABC -2015-09-04,100.66,101.4,98.36,98.99,2526560,ABC -2015-09-08,100.8,101.18,99.79,101.05,1844782,ABC -2015-09-09,101.14,103.46,100.55,100.75,1588412,ABC -2015-09-10,100.65,102.88,100.36,102.03,1782336,ABC -2015-09-11,101.88,104.4,101.82,104.34,1994741,ABC -2015-09-14,104.82,105.48,103.79,104.62,1762397,ABC -2015-09-15,104.72,105.81,104.21,105.56,1481576,ABC -2015-09-16,105.51,106.45,104.82,106.29,1538946,ABC -2015-09-17,106.44,108.18,105.63,106.78,1631793,ABC -2015-09-18,105.91,106.915,105.296,106.1,2222304,ABC -2015-09-21,106.58,107.36,104.76,105.28,1460542,ABC -2015-09-22,104.34,104.5,102.66,103.27,2487104,ABC -2015-09-23,103.2,103.67,102.28,102.82,2217843,ABC -2015-09-24,102.43,102.57,100.59,101.97,1616047,ABC -2015-09-25,102.39,102.465,99.39,99.84,1768355,ABC -2015-09-28,99.34,99.65,96.11,96.19,3379058,ABC -2015-09-29,96.19,96.81,94.41,95.05,2988453,ABC -2015-09-30,96.08,96.68,94.07,94.99,2381231,ABC -2015-10-01,94.99,95.11,93.2,94.51,3193091,ABC -2015-10-02,92.34,94.98,90.82,94.91,3488852,ABC -2015-10-05,95.34,95.83,94.01,94.99,2541998,ABC 
-2015-10-06,95.47,95.9,92.27,93.03,3778003,ABC -2015-10-07,93.35,93.73,92.26,92.94,2858779,ABC -2015-10-08,93.06,93.88,91.77,93.54,2198667,ABC -2015-10-09,93.23,93.69,92.33,92.84,2815940,ABC -2015-10-12,93.02,93.75,92.8,93.72,1920539,ABC -2015-10-13,93.88,95.19,93.8,94.15,3040779,ABC -2015-10-14,94.0,94.98,92.74,93.03,2191237,ABC -2015-10-15,93.16,94.3,92.11,94.07,2959624,ABC -2015-10-16,94.62,95.32,94.34,95.06,1680239,ABC -2015-10-19,94.97,95.54,94.17,94.79,1561168,ABC -2015-10-20,94.62,95.05,92.87,93.18,2668072,ABC -2015-10-21,93.45,93.83,91.69,92.71,2795542,ABC -2015-10-22,92.88,93.69,91.79,92.8,3423227,ABC -2015-10-23,93.65,94.29,92.4,93.13,2596971,ABC -2015-10-26,93.06,94.16,92.84,93.63,2502124,ABC -2015-10-27,93.93,102.64,92.99,97.54,9731262,ABC -2015-10-28,97.68,98.18,96.21,97.5,3478482,ABC -2015-10-29,97.27,97.538,93.02,96.82,5179667,ABC -2015-10-30,97.27,98.49,96.4,96.51,3115065,ABC -2015-11-02,97.51,98.05,96.56,97.83,2434245,ABC -2015-11-03,97.44,98.17,96.77,97.67,1670818,ABC -2015-11-04,97.67,97.95,96.69,97.47,1871021,ABC -2015-11-05,97.8,98.41,97.38,97.89,2012510,ABC -2015-11-06,98.01,98.71,96.53,97.61,1601353,ABC -2015-11-09,97.33,97.905,96.38,97.4,1699133,ABC -2015-11-10,97.5,99.05,97.18,98.87,1815344,ABC -2015-11-11,99.41,99.64,98.15,98.24,1515599,ABC -2015-11-12,96.29,98.38,96.29,96.91,1914257,ABC -2015-11-13,96.59,97.63,96.22,96.59,1728151,ABC -2015-11-16,96.86,98.07,96.54,97.3,1829729,ABC -2015-11-17,97.73,99.53,97.33,99.17,2315615,ABC -2015-11-18,99.07,100.44,98.87,100.31,1988706,ABC -2015-11-19,100.33,100.86,99.04,99.19,2044635,ABC -2015-11-20,99.6,99.92,98.64,99.1,2299651,ABC -2015-11-23,99.0,99.0,98.4697,98.73,1062448,ABC -2015-11-24,98.16,98.62,97.75,98.5,1628614,ABC -2015-11-25,99.0,99.22,97.83,98.93,2091253,ABC -2015-11-27,99.02,99.7,98.47,98.99,775378,ABC -2015-11-30,99.08,99.49,98.63,98.64,1579810,ABC -2015-12-01,98.84,99.828,98.81,99.5,1823844,ABC -2015-12-02,99.83,100.01,98.51,98.85,1467961,ABC 
-2015-12-03,99.01,99.21,98.03,98.71,2629090,ABC -2015-12-04,99.12,100.4,98.54,99.89,1697308,ABC -2015-12-07,99.92,100.4,99.34,99.98,1265135,ABC -2015-12-08,99.65,100.58,99.33,100.47,1716788,ABC -2015-12-09,99.53,101.01,99.37,100.67,3365962,ABC -2015-12-10,100.93,102.42,100.62,101.95,2678670,ABC -2015-12-11,101.49,102.81,101.29,102.08,2028321,ABC -2015-12-14,102.2,102.75,101.33,102.53,2361560,ABC -2015-12-15,103.02,103.49,101.34,101.69,2229678,ABC -2015-12-16,102.1,102.44,100.44,101.59,1993677,ABC -2015-12-17,101.69,102.89,101.44,102.34,2791828,ABC -2015-12-18,102.13,102.61,101.24,101.88,4466683,ABC -2015-12-21,102.17,102.92,101.61,102.89,1669306,ABC -2015-12-22,103.0,103.44,102.2,103.16,1087925,ABC -2015-12-23,103.15,104.24,102.8001,103.94,1244544,ABC -2015-12-24,103.71,104.19,103.52,103.95,435407,ABC -2015-12-28,103.85,104.57,103.48,104.19,804389,ABC -2015-12-29,104.69,105.35,104.2795,104.77,828491,ABC -2015-12-30,104.94,105.807,104.79,105.02,1043943,ABC -2015-12-31,104.47,105.12,103.68,103.71,1107637,ABC -2016-01-04,102.31,102.64,101.29,101.87,2326986,ABC -2016-01-05,102.2,103.4,101.59,103.36,2049940,ABC -2016-01-06,101.46,102.64,101.31,101.78,2267322,ABC -2016-01-07,100.87,101.15,98.49,98.53,3658651,ABC -2016-01-08,99.16,99.51,97.8,97.96,1837547,ABC -2016-01-11,95.51,96.0,93.81,94.06,5985830,ABC -2016-01-12,94.45,95.51,93.78,94.18,3620742,ABC -2016-01-13,94.31,94.85,92.94,93.14,4002668,ABC -2016-01-14,92.79,94.2431,92.42,93.72,2789404,ABC -2016-01-15,93.24,93.53,91.43,92.77,3212381,ABC -2016-01-19,93.29,93.341,91.59,92.54,3424171,ABC -2016-01-20,91.79,92.25,89.68,90.9,4490406,ABC -2016-01-21,90.9,91.83,89.79,90.57,2196707,ABC -2016-01-22,91.89,92.08,90.84,91.0,1985773,ABC -2016-01-25,91.99,92.23,91.05,91.27,2439425,ABC -2016-01-26,91.41,91.59,90.61,91.26,1327540,ABC -2016-01-27,91.29,91.52,89.73,90.18,2844540,ABC -2016-01-28,90.5,90.73,87.09,87.29,6053374,ABC -2016-01-29,87.55,89.6,87.28,89.56,3764840,ABC -2016-02-01,89.11,89.38,87.66,89.0,3036577,ABC 
-2016-02-02,88.6,89.89,87.95,88.14,2849591,ABC -2016-02-03,88.34,88.545,86.87,87.99,3998001,ABC -2016-02-04,80.51,87.97,80.5,84.4,8039420,ABC -2016-02-05,84.0,86.13,82.66,84.7,4853507,ABC -2016-02-08,83.89,84.24,82.72,83.62,4596574,ABC -2016-02-09,82.69,85.04,82.62,84.75,2808974,ABC -2016-02-10,85.44,86.35,85.22,85.6,2708381,ABC -2016-02-11,84.25,85.09,84.19,84.55,2758391,ABC -2016-02-12,84.6,85.61,84.49,85.61,1880401,ABC -2016-02-16,86.11,86.23,85.0,85.61,2234449,ABC -2016-02-17,86.06,87.83,85.72,87.49,3574732,ABC -2016-02-18,87.03,87.49,86.59,86.69,2225791,ABC -2016-02-19,86.74,87.05,86.28,86.84,1653572,ABC -2016-02-22,86.86,87.89,86.83,87.7,2193821,ABC -2016-02-23,87.69,88.1899,86.86,87.35,1675813,ABC -2016-02-24,86.52,87.28,85.8,87.08,2737266,ABC -2016-02-25,87.55,88.03,86.88,87.6,1264526,ABC -2016-02-26,87.85,88.57,87.47,87.54,1160721,ABC -2016-02-29,87.27,87.51,86.435,86.62,2595945,ABC -2016-03-01,86.8,87.73,86.47,87.43,1942517,ABC -2016-03-02,87.43,87.96,87.04,87.63,1628516,ABC -2016-03-03,87.63,88.34,87.13,88.22,1908021,ABC -2016-03-04,88.17,88.47,87.781,88.0,1667303,ABC -2016-03-07,87.36,88.25,87.19,88.1,1757279,ABC -2016-03-08,88.33,88.34,87.14,87.82,1834779,ABC -2016-03-09,87.79,88.01,87.37,87.57,1817730,ABC -2016-03-10,88.01,88.16,85.81,86.18,3419134,ABC -2016-03-11,87.02,89.64,86.56,89.55,3536540,ABC -2016-03-14,89.06,89.77,87.88,87.96,1454111,ABC -2016-03-15,87.5,88.13,86.11,86.3,2220405,ABC -2016-03-16,86.29,87.34,85.94,87.07,1399232,ABC -2016-03-17,87.07,87.47,85.73,85.88,3023176,ABC -2016-03-18,86.12,87.78,85.41,87.48,3675972,ABC -2016-03-21,87.58,87.84,86.8,87.13,1627142,ABC -2016-03-22,87.27,87.5315,86.62,87.16,1687835,ABC -2016-03-23,87.5,87.84,87.09,87.27,1327800,ABC -2016-03-24,86.86,87.23,85.9499,86.17,2449692,ABC -2016-03-28,87.04,87.4,85.71,86.48,1463831,ABC -2016-03-29,86.55,86.86,85.73,86.42,1413437,ABC -2016-03-30,86.5,87.445,86.42,87.09,1659918,ABC -2016-03-31,87.13,87.74,86.3,86.55,1716159,ABC 
-2016-04-01,86.61,86.85,85.995,86.51,2033286,ABC -2016-04-04,86.74,88.1,86.61,87.16,2271630,ABC -2016-04-05,86.37,87.01,85.58,85.69,2548841,ABC -2016-04-06,85.76,87.6,85.72,87.54,2816776,ABC -2016-04-07,87.3,88.0,86.6,86.66,2218686,ABC -2016-04-08,86.75,86.89,85.12,85.31,2483797,ABC -2016-04-11,85.16,85.535,84.36,84.44,1869825,ABC -2016-04-12,84.61,85.15,84.0499,84.9,1735067,ABC -2016-04-13,85.33,85.78,84.65,85.17,1930230,ABC -2016-04-14,84.7,87.14,84.63,86.7,2126537,ABC -2016-04-15,86.6,87.88,86.49,87.83,2362427,ABC -2016-04-18,88.04,88.72,87.64,88.66,1746813,ABC -2016-04-19,89.41,90.24,89.22,89.49,2041505,ABC -2016-04-20,89.92,91.97,89.64,91.86,3145279,ABC -2016-04-21,91.63,92.48,91.52,91.83,2346406,ABC -2016-04-22,92.03,92.42,91.15,91.89,1941408,ABC -2016-04-25,91.75,91.99,90.98,91.51,2236917,ABC -2016-04-26,91.73,92.0,91.13,91.28,1126704,ABC -2016-04-27,91.32,91.65,90.29,90.64,1708393,ABC -2016-04-28,89.61,89.61,86.18,86.42,5342208,ABC -2016-04-29,86.13,86.49,84.37,85.1,3905081,ABC -2016-05-02,85.19,85.37,84.51,85.23,1621893,ABC -2016-05-03,84.93,86.07,84.43,85.48,1950918,ABC -2016-05-04,84.98,85.3,84.0,84.29,2256078,ABC -2016-05-05,77.95,79.68,76.82,78.0,11878192,ABC -2016-05-06,77.96,78.15,75.35,76.52,6821324,ABC -2016-05-09,76.43,78.48,76.27,77.37,3961434,ABC -2016-05-10,77.78,78.2887,76.4,76.68,3683560,ABC -2016-05-11,76.48,76.82,75.5,75.76,3441066,ABC -2016-05-12,75.92,75.92,74.63,74.78,3469278,ABC -2016-05-13,74.71,75.07,73.54,73.66,2377477,ABC -2016-05-16,73.76,74.67,73.73,74.59,2991684,ABC -2016-05-17,74.4,75.06,74.06,74.53,2971403,ABC -2016-05-18,74.2,75.27,74.08,75.16,3253704,ABC -2016-05-19,74.46,75.31,74.34,75.05,3019081,ABC -2016-05-20,75.14,75.33,74.36,74.82,2544887,ABC -2016-05-23,75.0,75.05,73.91,73.94,1730530,ABC -2016-05-24,74.37,74.59,73.89,74.34,1949125,ABC -2016-05-25,74.59,74.75,73.87,74.04,1666428,ABC -2016-05-26,74.04,74.07,73.31,73.69,1460585,ABC -2016-05-27,73.78,74.11,73.62,74.03,1945946,ABC 
-2016-05-31,74.2,75.05,73.89,74.98,4431248,ABC -2016-06-01,74.76,75.94,74.59,75.64,2851495,ABC -2016-06-02,75.81,77.23,75.66,77.08,3043678,ABC -2016-06-03,76.7,77.0,76.27,76.54,1912637,ABC -2016-06-06,76.3,76.645,75.95,76.45,1492281,ABC -2016-06-07,76.39,76.54,75.92,76.22,1679403,ABC -2016-06-08,76.31,76.64,75.87,76.25,2048296,ABC -2016-06-09,76.22,76.42,75.9,76.17,3146459,ABC -2016-06-10,75.87,77.24,75.37,76.46,2818140,ABC -2016-06-13,76.44,77.18,76.21,76.41,2860557,ABC -2016-06-14,76.08,76.45,74.96,75.51,2961488,ABC -2016-06-15,75.95,76.73,75.87,76.12,2394921,ABC -2016-06-16,75.67,76.25,75.05,76.0,2112075,ABC -2016-06-17,76.01,76.33,75.16,75.7,3056969,ABC -2016-06-20,76.25,77.22,76.13,76.37,2448138,ABC -2016-06-21,76.49,76.7805,76.05,76.57,1277332,ABC -2016-06-22,76.57,77.48,76.07,77.13,1935264,ABC -2016-06-23,77.66,77.77,77.03,77.55,2646996,ABC -2016-06-24,75.08,75.85,75.04,75.44,4480624,ABC -2016-06-27,75.14,75.18,73.44,73.85,2231984,ABC -2016-06-28,74.32,75.45,74.06,75.32,2811318,ABC -2016-06-29,76.08,77.5,76.03,77.26,1926778,ABC -2016-06-30,77.61,79.33,77.43,79.32,3196307,ABC -2016-07-01,79.29,80.4299,79.1095,80.16,2763283,ABC -2016-07-05,79.86,80.685,79.39,80.36,2377153,ABC -2016-07-06,79.94,81.55,79.595,81.19,2457714,ABC -2016-07-07,81.09,81.53,80.53,80.99,2508864,ABC -2016-07-08,81.71,82.079,81.25,81.92,1597452,ABC -2016-07-11,81.97,82.6,81.85,82.18,1429336,ABC -2016-07-12,82.25,84.41,82.24,84.18,2842055,ABC -2016-07-13,84.53,85.35,84.33,84.93,2492179,ABC -2016-07-14,85.35,86.12,85.255,85.95,2035706,ABC -2016-07-15,86.26,86.52,84.92,85.55,2384258,ABC -2016-07-18,85.73,86.12,85.55,86.03,1113576,ABC -2016-07-19,85.76,86.15,85.1,85.92,1978415,ABC -2016-07-20,86.39,87.05,86.01,86.64,1248823,ABC -2016-07-21,86.38,86.895,85.53,85.81,1672006,ABC -2016-07-22,86.01,86.21,85.1949,85.86,1331479,ABC -2016-07-25,85.9,85.93,85.01,85.41,1858653,ABC -2016-07-26,85.28,86.04,84.62,85.71,1646102,ABC -2016-07-27,85.71,85.8,84.66,85.42,1839847,ABC 
-2016-07-28,85.05,85.06,84.24,84.82,2434584,ABC -2016-07-29,84.34,85.375,84.12,85.19,1935906,ABC -2016-08-01,85.28,86.32,85.11,85.54,2930407,ABC -2016-08-02,88.64,90.54,87.58,89.43,5179162,ABC -2016-08-03,89.34,89.89,88.37,89.89,2565249,ABC -2016-08-04,89.71,89.81,88.44,88.98,1897447,ABC -2016-08-05,89.37,90.48,89.25,89.35,1527972,ABC -2016-08-08,89.71,89.71,88.47,88.67,1596276,ABC -2016-08-09,88.78,89.36,88.37,88.62,1491750,ABC -2016-08-10,88.79,88.83,87.66,87.9,1621783,ABC -2016-08-11,87.97,89.05,87.47,88.95,1669302,ABC -2016-08-12,89.14,89.19,88.33,88.91,1319515,ABC -2016-08-15,88.95,89.41,88.64,89.39,1796956,ABC -2016-08-16,88.99,90.12,88.99,89.46,1555381,ABC -2016-08-17,89.29,89.52,88.36,88.95,1496073,ABC -2016-08-18,88.54,88.99,88.11,88.97,1444667,ABC -2016-08-19,88.53,89.0,88.26,88.85,2089408,ABC -2016-08-22,88.8,88.93,88.47,88.75,1011750,ABC -2016-08-23,89.14,89.23,88.83,88.99,638202,ABC -2016-08-24,88.9,89.09,88.095,88.39,1338571,ABC -2016-08-25,88.15,88.26,85.39,85.51,3351527,ABC -2016-08-26,85.67,86.565,85.15,85.75,1872825,ABC -2016-08-29,85.82,86.445,85.3103,86.31,1352356,ABC -2016-08-30,86.5,86.87,85.68,86.04,1736837,ABC -2016-08-31,86.13,87.075,85.73,86.97,2682035,ABC -2016-09-01,87.01,87.72,86.58,87.15,1331095,ABC -2016-09-02,87.55,87.87,87.25,87.44,1217443,ABC -2016-09-06,87.3,87.798,87.0,87.63,1652301,ABC -2016-09-07,87.6,88.439,87.45,87.75,1513718,ABC -2016-09-08,87.4,87.67,87.14,87.27,2195185,ABC -2016-09-09,86.63,86.82,85.22,85.31,2470512,ABC -2016-09-12,85.01,87.14,84.95,86.95,2380689,ABC -2016-09-13,86.2,86.5499,84.98,85.25,2722591,ABC -2016-09-14,85.38,85.48,83.42,83.85,5896191,ABC -2016-09-15,83.54,84.54,83.06,84.2,2438533,ABC -2016-09-16,83.96,84.39,83.01,84.11,3459904,ABC -2016-09-19,83.91,84.96,83.07,83.26,2335948,ABC -2016-09-20,83.65,83.68,82.07,82.08,2484426,ABC -2016-09-21,82.2,83.15,81.79,83.02,2813172,ABC -2016-09-22,83.26,83.95,83.05,83.59,2270372,ABC -2016-09-23,83.55,83.78,82.97,83.33,1829170,ABC 
-2016-09-26,83.07,83.56,82.67,82.72,1339953,ABC -2016-09-27,82.75,83.74,82.46,83.25,1583508,ABC -2016-09-28,83.25,83.3,82.46,83.2,1310409,ABC -2016-09-29,82.92,83.28,81.25,81.25,1625347,ABC -2016-09-30,81.29,81.64,79.34,80.78,3001798,ABC -2016-10-03,80.68,80.945,79.99,80.12,1563127,ABC -2016-10-04,81.26,81.72,80.25,80.53,1680972,ABC -2016-10-05,80.71,80.8585,79.88,80.03,1745300,ABC -2016-10-06,79.58,80.29,78.9,80.14,2249064,ABC -2016-10-07,80.43,80.55,78.7,79.39,1837066,ABC -2016-10-10,79.07,80.01,78.78,79.64,1281821,ABC -2016-10-11,79.6,80.035,79.26,79.61,2409049,ABC -2016-10-12,79.69,81.05,79.42,80.61,2368027,ABC -2016-10-13,80.28,80.93,79.93,80.38,2493977,ABC -2016-10-14,80.49,80.88,79.79,79.79,1285780,ABC -2016-10-17,79.75,79.91,79.23,79.74,2187491,ABC -2016-10-18,80.46,81.24,80.165,81.09,1914247,ABC -2016-10-19,81.2,81.2,80.35,80.35,1645716,ABC -2016-10-20,80.28,81.765,80.23,80.74,2312897,ABC -2016-10-21,80.16,80.53,79.93,80.02,1210526,ABC -2016-10-24,80.34,80.88,79.94,80.04,1308294,ABC -2016-10-25,79.6,80.56,79.45,80.39,1688735,ABC -2016-10-26,80.01,80.24,79.1,79.37,2134630,ABC -2016-10-27,79.56,79.84,79.11,79.5,1595088,ABC -2016-10-28,69.13,71.41,68.38,69.14,9767107,ABC -2016-10-31,69.51,70.94,69.51,70.32,4437023,ABC -2016-11-01,69.85,71.18,69.65,69.79,4264212,ABC -2016-11-02,70.27,76.69,70.18,75.96,6161115,ABC -2016-11-03,75.87,77.47,71.21,72.32,7258555,ABC -2016-11-04,71.69,73.32,70.27,70.47,4999820,ABC -2016-11-07,71.52,72.34,70.99,71.93,3629531,ABC -2016-11-08,71.03,71.16,68.705,69.03,4787219,ABC -2016-11-09,72.98,76.15,71.75,75.61,5516306,ABC -2016-11-10,77.09,80.8025,77.09,79.59,4612159,ABC -2016-11-11,79.41,79.85,77.24,78.92,5221526,ABC -2016-11-14,82.16,82.58,79.38,81.33,4404177,ABC -2016-11-15,81.5,81.66,78.84,79.53,4285568,ABC -2016-11-16,79.16,80.21,78.66,78.86,2962264,ABC -2016-11-17,78.2,80.25,77.71,80.16,3045675,ABC -2016-11-18,79.91,79.91,78.91,79.21,3644028,ABC -2016-11-21,78.62,79.34,77.84,78.45,2607501,ABC 
-2016-11-22,78.77,78.77,77.77,78.41,1910352,ABC -2016-11-23,78.07,78.9,77.76,78.7,1324555,ABC -2016-11-25,79.07,80.48,78.35,78.57,1159047,ABC -2016-11-28,78.61,79.58,77.81,79.05,2124611,ABC -2016-11-29,78.65,79.36,77.53,77.63,2493695,ABC -2016-11-30,77.72,78.92,77.45,77.99,5158879,ABC -2016-12-01,77.88,78.7,77.61,77.8,1505641,ABC -2016-12-02,78.15,79.58,77.66,78.8,1733280,ABC -2016-12-05,78.89,79.98,78.51,79.54,2611163,ABC -2016-12-06,79.9,80.1,78.54,78.87,2374826,ABC -2016-12-07,78.19,78.19,73.86,76.38,4493498,ABC -2016-12-08,76.24,77.64,75.68,76.88,2567162,ABC -2016-12-09,77.19,79.22,77.02,78.9,1790907,ABC -2016-12-12,78.37,78.97,77.28,78.07,1730335,ABC -2016-12-13,78.38,79.17,78.17,79.04,1329611,ABC -2016-12-14,78.95,79.4,76.59,77.05,2282061,ABC -2016-12-15,76.8,76.83,75.46,76.67,2174638,ABC -2016-12-16,76.94,77.585,76.5,77.38,3635061,ABC -2016-12-19,77.55,78.65,77.375,78.42,1816319,ABC -2016-12-20,78.7,80.06,78.56,79.3,2934896,ABC -2016-12-21,78.96,79.575,78.61,78.85,1355812,ABC -2016-12-22,78.96,78.96,77.325,77.81,1881793,ABC -2016-12-23,78.04,78.93,77.89,78.56,596639,ABC -2016-12-27,78.42,79.27,78.3,78.82,655245,ABC -2016-12-28,78.86,79.22,78.21,78.35,862078,ABC -2016-12-29,78.61,80.24,78.59,79.34,1014792,ABC -2016-12-30,79.35,79.49,77.96,78.19,1409032,ABC -2017-01-03,78.51,83.1,78.31,82.61,4134229,ABC -2017-01-04,82.6,85.14,82.47,84.66,2561906,ABC -2017-01-05,84.38,84.83,82.62,83.68,2136224,ABC -2017-01-06,83.53,85.78,83.53,84.8,2131961,ABC -2017-01-09,84.6,86.07,84.6,85.48,2081568,ABC -2017-01-10,85.1,86.14,84.3,84.83,1860866,ABC -2017-01-11,85.15,86.315,79.94,81.53,3994919,ABC -2017-01-12,82.28,84.94,81.95,83.86,2771249,ABC -2017-01-13,84.08,85.12,83.51,84.62,1397850,ABC -2017-01-17,84.92,86.47,84.09,85.99,2103556,ABC -2017-01-18,85.95,86.18,85.19,85.51,2094559,ABC -2017-01-19,85.28,85.78,84.82,85.47,1574505,ABC -2017-01-20,85.86,86.27,83.21,83.75,3780132,ABC -2017-01-23,83.72,85.1,83.31,84.88,1420144,ABC -2017-01-24,84.7,85.19,84.06,84.41,1513452,ABC 
-2017-01-25,84.9,85.92,84.58,85.83,1858179,ABC -2017-01-26,81.04,85.195,81.04,83.39,3482915,ABC -2017-01-27,83.68,83.68,82.07,82.43,1865412,ABC -2017-01-30,82.55,83.75,80.82,83.62,2650040,ABC -2017-01-31,86.62,87.84,83.0,87.28,5827271,ABC -2017-02-01,87.71,88.7,86.09,88.61,3361963,ABC -2017-02-02,88.05,88.78,86.57,88.05,3836359,ABC -2017-02-03,88.57,89.61,87.74,89.28,2990323,ABC -2017-02-06,89.17,91.65,88.931,89.98,2869751,ABC -2017-02-07,89.85,91.64,89.71,90.33,2108481,ABC -2017-02-08,90.18,91.11,89.675,91.07,1469758,ABC -2017-02-09,91.05,91.78,90.16,91.65,1630654,ABC -2017-02-10,91.54,91.75,90.266,91.17,1527574,ABC -2017-02-13,91.53,94.5,89.42,90.48,3038167,ABC -2017-02-14,90.0,91.09,89.73,90.82,2066338,ABC -2017-02-15,90.65,91.47,89.66,91.18,1944692,ABC -2017-02-16,91.0,91.54,89.0,90.32,2282876,ABC -2017-02-17,90.24,90.88,89.72,90.76,1441214,ABC -2017-02-21,90.74,91.15,89.56,91.01,2061161,ABC -2017-02-22,91.19,92.2,91.01,91.34,1152878,ABC -2017-02-23,91.45,92.396,90.96,91.61,2169821,ABC -2017-02-24,91.51,92.16,90.53,92.16,1001677,ABC -2017-02-27,92.31,92.74,91.3,91.73,1140061,ABC -2017-02-28,91.9,91.93,90.98,91.51,1916323,ABC -2017-03-01,92.19,92.807,91.69,92.23,1394379,ABC -2017-03-02,91.89,92.73,91.45,91.94,1438448,ABC -2017-03-03,92.22,92.67,91.47,92.23,1405380,ABC -2017-03-06,91.58,91.87,89.74,89.99,1807994,ABC -2017-03-07,89.23,89.8,87.92,88.48,2612066,ABC -2017-03-08,88.32,89.97,88.32,89.08,1847565,ABC -2017-03-09,89.4,89.4,88.08,88.61,1125654,ABC -2017-03-10,88.99,89.0,87.76,88.31,1545634,ABC -2017-03-13,88.76,89.25,88.22,89.14,1673888,ABC -2017-03-14,88.96,89.2,88.04,88.81,1243545,ABC -2017-03-15,88.85,89.57,88.14,89.48,1800390,ABC -2017-03-16,89.58,89.76,87.89,88.0,1520046,ABC -2017-03-17,87.91,88.325,86.88,87.53,2296128,ABC -2017-03-20,87.65,87.8915,86.25,86.91,1618707,ABC -2017-03-21,87.0,87.29,85.77,85.91,1988779,ABC -2017-03-22,86.12,86.28,85.22,85.88,1189688,ABC -2017-03-23,85.86,86.75,85.79,85.99,1440204,ABC 
-2017-03-24,85.99,86.83,85.72,86.56,1241019,ABC -2017-03-27,86.14,87.545,86.12,87.48,1558617,ABC -2017-03-28,87.12,87.97,87.12,87.5,1199881,ABC -2017-03-29,87.4,89.77,87.4,89.51,2281057,ABC -2017-03-30,89.35,89.88,89.03,89.14,1395100,ABC -2017-03-31,88.87,89.65,88.47,88.5,1793069,ABC -2017-04-03,88.41,88.63,86.8,87.33,1379461,ABC -2017-04-04,87.06,87.99,86.71,86.93,1372429,ABC -2017-04-05,87.44,88.26,86.99,87.18,1187096,ABC -2017-04-06,87.26,87.69,86.85,87.64,1586146,ABC -2017-04-07,87.57,88.07,87.29,87.62,933399,ABC -2017-04-10,87.75,89.25,87.59,88.44,1007847,ABC -2017-04-11,88.28,88.35,86.82,87.87,1449626,ABC -2017-04-12,87.15,87.59,86.14,86.82,1887694,ABC -2017-04-13,86.64,87.29,86.36,86.67,787500,ABC -2017-04-17,86.64,86.72,86.21,86.64,1284425,ABC -2017-04-18,83.17,83.44,81.13,82.56,4384149,ABC -2017-04-19,82.64,83.09,81.62,81.85,1904924,ABC -2017-04-20,82.23,82.93,81.62,82.37,2058848,ABC -2017-04-21,82.2,82.2905,81.11,81.26,1138873,ABC -2017-04-24,82.03,82.105,80.81,80.94,2300122,ABC -2017-04-25,80.82,81.57,79.4,81.49,1988130,ABC -2017-04-26,81.5,82.17,81.2,81.25,1219388,ABC -2017-04-27,81.38,82.19,81.3,82.12,1671300,ABC -2017-04-28,82.17,82.4,81.61,82.05,1477044,ABC -2017-05-01,82.32,83.01,81.86,82.49,1708821,ABC -2017-05-02,82.76,82.92,81.07,82.66,1825633,ABC -2017-05-03,82.45,83.11,81.6,82.67,2920282,ABC -2017-05-04,83.5,87.97,83.5,86.53,3767554,ABC -2017-05-05,86.66,86.97,85.54,85.89,1707344,ABC -2017-05-08,86.21,86.98,86.02,86.59,1791265,ABC -2017-05-09,86.7,88.21,86.67,87.54,1924109,ABC -2017-05-10,87.57,88.71,87.56,88.14,1297750,ABC -2017-05-11,87.84,88.935,87.8,88.86,2145777,ABC -2017-05-12,88.61,88.97,88.44,88.75,1202105,ABC -2017-05-15,88.49,90.99,88.47,89.92,2107545,ABC -2017-05-16,89.84,89.84,88.57,89.12,1273184,ABC -2017-05-17,87.48,88.45,86.13,86.28,1797010,ABC -2017-05-18,86.26,87.66,86.1201,86.69,1152127,ABC -2017-05-19,87.62,89.79,87.05,89.16,1812784,ABC -2017-05-22,89.0,90.2,89.0,89.48,1343342,ABC 
-2017-05-23,89.73,89.85,89.03,89.19,1709784,ABC -2017-05-24,89.38,90.32,88.55,89.97,1266722,ABC -2017-05-25,90.0,91.43,89.83,91.11,1047084,ABC -2017-05-26,91.19,91.5,90.91,91.0,892137,ABC -2017-05-30,90.66,91.39,90.53,91.03,1248400,ABC -2017-05-31,91.33,91.79,90.75,91.77,1870194,ABC -2017-06-01,91.71,93.3,91.44,93.14,1326944,ABC -2017-06-02,93.5,93.84,92.71,93.58,1284988,ABC -2017-06-05,93.2,93.57,92.51,93.32,1050825,ABC -2017-06-06,93.19,93.2,91.46,91.69,1377412,ABC -2017-06-07,91.95,92.23,91.45,91.93,1182823,ABC -2017-06-08,92.19,92.19,91.21,91.57,1113609,ABC -2017-06-09,91.28,92.31,91.025,92.1,1157148,ABC -2017-06-12,91.89,92.75,91.89,92.36,816548,ABC -2017-06-13,92.51,93.0,92.03,93.0,636448,ABC -2017-06-14,93.0,94.592,92.9,93.9,901945,ABC -2017-06-15,93.5,95.18,92.75,94.43,1733738,ABC -2017-06-16,94.54,94.56,91.74,92.53,1880554,ABC -2017-06-19,92.61,94.02,91.76,93.75,1072896,ABC -2017-06-20,93.75,93.9,92.81,93.25,1089133,ABC -2017-06-21,93.93,95.65,92.9,95.62,1092668,ABC -2017-06-22,95.7,97.67,95.5,96.37,1730046,ABC -2017-06-23,96.5,96.809,95.3,95.82,1624071,ABC -2017-06-26,96.2,96.72,95.63,95.99,1171116,ABC -2017-06-27,95.92,96.7,95.1,95.53,1253124,ABC -2017-06-28,96.17,97.11,96.07,96.38,897599,ABC -2017-06-29,96.68,97.85,95.39,95.76,1053253,ABC -2017-06-30,94.94,95.23,93.35,94.53,1236094,ABC -2017-07-03,94.86,95.36,94.4,94.7,527371,ABC -2017-07-05,94.91,95.68,94.25,95.22,908931,ABC -2017-07-06,94.69,95.19,92.24,92.51,951073,ABC -2017-07-07,93.04,94.26,92.74,93.91,1158714,ABC -2017-07-10,93.65,94.54,92.28,92.49,1255339,ABC -2017-07-11,92.65,92.65,91.15,92.18,911512,ABC -2017-07-12,92.84,93.7,92.61,93.21,655133,ABC -2017-07-13,93.55,93.79,92.32,92.86,788780,ABC -2017-07-14,93.06,93.42,92.53,92.96,859020,ABC -2017-07-17,93.05,93.37,92.55,92.71,1017656,ABC -2017-07-18,92.68,92.68,90.97,91.37,1299283,ABC -2017-07-19,91.56,91.67,90.3,91.17,1331601,ABC -2017-07-20,91.2,92.35,90.62,91.95,1136414,ABC -2017-07-21,91.82,92.08,91.44,91.62,604429,ABC 
-2017-07-24,91.64,93.66,91.34,93.13,1273706,ABC -2017-07-25,93.4,95.11,92.6,94.65,1413497,ABC -2017-07-26,94.27,94.725,93.41,94.21,1366129,ABC -2017-07-27,93.49,93.67,91.86,92.78,1493251,ABC -2017-07-28,93.0,94.17,92.38,93.97,869223,ABC -2017-07-31,94.3,94.42,93.25,93.82,1067330,ABC -2017-08-01,92.19,92.19,89.09,91.27,2301036,ABC -2017-08-02,92.19,92.19,89.09,91.27,2301075,ABC -2017-08-03,84.11,85.33,80.51,81.71,7434738,ABC -2017-08-04,81.71,81.97,79.58,80.83,2882893,ABC -2017-08-07,80.8,81.03,79.24,80.58,2287101,ABC -2017-08-08,80.91,82.3399,80.13,80.33,1742069,ABC -2017-08-09,79.86,79.95,78.6,79.8,2620128,ABC -2017-08-10,79.43,80.025,78.13,78.88,2417110,ABC -2017-08-11,78.83,80.31,78.73,80.02,2214779,ABC -2017-08-14,80.31,80.53,79.66,80.08,2032876,ABC -2017-08-15,80.25,80.425,79.78,80.0,2126689,ABC -2017-08-16,80.29,80.58,79.5307,80.03,1599790,ABC -2017-08-17,79.68,81.34,79.68,79.94,1727372,ABC -2017-08-18,79.69,79.89,78.46,78.6,2091927,ABC -2017-08-21,78.43,78.59,77.9,78.04,1455125,ABC -2017-08-22,78.24,79.46,78.09,79.23,1372974,ABC -2017-08-23,78.98,79.0,77.96,78.14,1455723,ABC -2017-08-24,78.33,78.9,77.855,78.4,1191129,ABC -2017-08-25,78.39,78.9522,77.97,78.1,1154263,ABC -2017-08-28,78.38,78.88,77.69,78.66,1210930,ABC -2017-08-29,78.16,79.08,77.84,78.56,1279567,ABC -2017-08-30,78.43,78.53,77.79,78.33,1267028,ABC -2017-08-31,78.65,80.5,78.46,80.25,1301215,ABC -2017-09-01,80.46,80.61,79.22,80.24,1248437,ABC -2017-09-05,79.81,80.78,79.05,79.69,1123869,ABC -2017-09-06,80.0,80.935,79.84,80.72,1308264,ABC -2017-09-07,80.82,83.03,80.65,82.56,1825285,ABC -2017-09-08,82.54,83.94,82.33,82.5,1936786,ABC -2017-09-11,83.33,84.56,82.89,84.39,1230843,ABC -2017-09-12,84.19,84.805,83.11,83.9,1809742,ABC -2017-09-13,83.76,84.344,83.06,84.32,1310879,ABC -2017-09-14,82.32,82.32,80.44,80.69,2872119,ABC -2017-09-15,80.47,81.73,80.02,81.6,2732342,ABC -2017-09-18,81.8,82.03,80.54,80.67,1350697,ABC -2017-09-19,80.53,81.04,79.13,79.28,1648515,ABC 
-2017-09-20,79.47,80.09,78.3399,79.5,1324955,ABC -2017-09-21,79.49,80.94,79.2,80.38,1035888,ABC -2017-09-22,80.53,81.2,80.33,81.05,835921,ABC -2017-09-25,80.23,81.36,80.14,80.98,717129,ABC -2017-09-26,81.41,82.65,81.33,82.0,1018543,ABC -2017-09-27,82.17,82.8,81.02,82.43,960866,ABC -2017-09-28,81.85,81.88,80.27,80.99,1120810,ABC -2017-09-29,81.71,83.84,81.42,82.75,1629417,ABC -2017-10-02,82.97,83.63,82.38,83.3,1932971,ABC -2017-10-03,83.33,83.7287,82.515,82.88,1141899,ABC -2017-10-04,82.76,84.14,82.56,82.74,2206922,ABC -2017-10-05,84.16,85.29,83.83,84.12,2136222,ABC -2017-10-06,84.23,84.5,80.04,80.48,2919861,ABC -2017-10-09,80.21,80.49,77.87,78.35,2644461,ABC -2017-10-10,79.49,80.4,79.01,79.55,1620280,ABC -2017-10-11,79.45,80.5,79.25,80.32,1004554,ABC -2017-10-12,80.37,81.2984,80.37,80.8,1195264,ABC -2017-10-13,80.42,81.41,77.77,79.44,2469252,ABC -2017-10-16,79.02,81.04,78.4,79.0,2545489,ABC -2017-10-17,78.89,80.52,78.4,79.95,1043927,ABC -2017-10-18,80.13,81.66,80.13,80.49,1699309,ABC -2017-10-19,80.82,82.55,80.735,82.2,1436645,ABC -2017-10-20,82.57,83.77,82.35,83.21,1297292,ABC -2017-10-23,83.17,83.4299,81.7658,82.53,1116159,ABC -2017-10-24,82.69,82.98,80.64,81.85,1166044,ABC -2017-10-25,82.0,82.52,81.41,82.23,1266446,ABC -2017-10-26,83.97,85.8,77.47,78.77,4756230,ABC -2017-10-27,78.59,78.77,75.66,76.38,4690478,ABC -2017-10-30,76.01,77.78,76.01,76.93,2164912,ABC -2017-10-31,76.85,78.99,75.46,76.95,3132825,ABC -2017-11-01,77.48,77.495,76.31,76.62,2546091,ABC -2017-11-02,77.02,78.93,71.9,73.23,3840211,ABC -2017-11-03,73.34,75.3,73.32,74.8,2726048,ABC -2017-11-06,74.8,75.2,73.19,74.86,3478259,ABC -2017-11-07,74.92,75.73,74.56,74.91,1777500,ABC -2017-11-08,74.87,75.41,73.35,75.01,1710169,ABC -2017-11-09,75.07,76.63,74.9,75.27,2576836,ABC -2017-11-10,75.05,76.68,74.05,75.0,3005741,ABC -2017-11-13,74.69,77.0,74.31,75.4,2892626,ABC -2017-11-14,75.1,75.48,74.45,75.4,1863339,ABC -2017-11-15,75.32,77.66,74.6,77.53,2731010,ABC -2017-11-16,77.72,80.71,77.41,80.31,2986809,ABC 
-2017-11-17,79.65,80.03,77.58,78.84,2196698,ABC -2017-11-20,78.3,78.76,76.48,77.46,1688577,ABC -2017-11-21,77.83,79.57,77.5677,79.46,1598746,ABC -2017-11-22,79.98,81.99,79.58,81.4,1927033,ABC -2017-11-24,81.9,81.98,80.22,81.23,701755,ABC -2017-11-27,81.07,81.78,80.65,81.12,1181051,ABC -2017-11-28,81.4,83.185,80.65,83.15,1549759,ABC -2017-11-29,83.56,85.63,83.045,84.66,2380893,ABC -2017-11-30,85.0,86.1,83.89,84.82,1808323,ABC -2017-12-01,83.98,85.25,81.2076,84.95,3188555,ABC -2017-12-04,85.73,88.2,85.44,87.42,2132534,ABC -2017-12-05,87.49,87.56,85.07,85.89,1906139,ABC -2017-12-06,86.2,86.79,85.37,85.99,1524773,ABC -2017-12-07,86.27,86.84,85.43,85.93,1763704,ABC -2017-12-08,86.06,86.83,85.01,86.7,1734132,ABC -2017-12-11,87.05,87.5,86.29,86.77,1345768,ABC -2017-12-12,87.37,87.87,86.79,87.82,1397995,ABC -2017-12-13,87.54,89.24,87.36,88.48,1391586,ABC -2017-12-14,88.95,90.8,88.205,89.31,2920744,ABC -2017-12-15,91.17,94.06,90.78,93.17,4153406,ABC -2017-12-18,92.25,92.95,91.755,92.4,1880538,ABC -2017-12-19,92.66,94.44,92.33,93.79,2013605,ABC -2017-12-20,94.075,94.24,92.96,93.74,1442413,ABC -2017-12-21,93.83,94.16,92.445,92.99,1405432,ABC -2017-12-22,92.67,93.41,91.5,92.46,1332206,ABC -2017-12-26,92.48,93.88,91.91,93.25,734272,ABC -2017-12-27,93.18,93.2,92.001,92.6,734799,ABC -2017-12-28,92.88,92.93,91.745,92.59,1152493,ABC -2017-12-29,92.89,93.6,91.755,91.82,777786,ABC -2018-01-02,92.15,94.07,92.12,94.04,1138157,ABC -2018-01-03,93.4,94.73,92.47,94.39,1035206,ABC -2018-01-04,94.64,94.83,92.86,94.18,1781646,ABC -2018-01-05,93.77,95.63,93.77,95.32,1342778,ABC -2018-01-08,95.11,97.81,94.81,96.9,1731070,ABC -2018-01-09,96.95,97.86,95.91,97.52,1783471,ABC -2018-01-10,96.54,97.31,94.75,97.21,1070249,ABC -2018-01-11,97.4,98.14,96.52,98.14,780736,ABC -2018-01-12,98.5,99.0,97.55,99.0,1469349,ABC -2018-01-16,99.1,100.67,98.97,99.55,1869731,ABC -2018-01-17,99.77,101.85,99.77,101.3,1864850,ABC -2018-01-18,101.53,101.64,99.75,100.86,1895835,ABC 
-2018-01-19,100.87,101.58,99.63,100.06,1371361,ABC -2018-01-22,100.28,102.73,99.66,102.66,1313414,ABC -2018-01-23,103.0,103.67,102.1,102.81,1371502,ABC -2018-01-24,103.29,104.96,102.9,104.62,2111679,ABC -2018-01-25,104.67,104.67,103.435,104.37,992747,ABC -2018-01-26,104.56,105.73,104.11,105.48,1453297,ABC -2018-01-29,105.14,106.27,104.865,105.09,1422994,ABC -2018-01-30,102.0,103.65,100.03,102.46,3113941,ABC -2018-01-31,102.98,103.04,99.06,99.67,2458633,ABC -2018-02-01,97.74,99.81,95.73,99.29,2786798,ABC -2018-02-02,99.09,99.09,95.91,96.02,1660267,ABC -2018-02-05,95.62,96.52,91.69,91.9,2278534,ABC -2018-02-06,92.58,93.37,86.9403,91.54,4574997,ABC -2018-02-07,91.6,95.34,91.1,94.22,2509484,ABC diff --git a/docs/examples/openai/openai_async_chat.py b/docs/examples/openai/openai_async_chat.py deleted file mode 100644 index 50d81de..0000000 --- a/docs/examples/openai/openai_async_chat.py +++ /dev/null @@ -1,31 +0,0 @@ -""" -Demonstrates how to use the `ChatInterface` to create a chatbot using -OpenAI's with async/await. 
-""" - -import panel as pn -from openai import AsyncOpenAI - -pn.extension() - - -async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): - response = await aclient.chat.completions.create( - model="gpt-3.5-turbo", - messages=[{"role": "user", "content": contents}], - stream=True, - ) - message = "" - async for chunk in response: - part = chunk.choices[0].delta.content - if part is not None: - message += part - yield message - - -aclient = AsyncOpenAI() -chat_interface = pn.chat.ChatInterface(callback=callback, callback_user="ChatGPT") -chat_interface.send( - "Send a message to get a reply from ChatGPT!", user="System", respond=False -) -chat_interface.servable() diff --git a/docs/examples/openai/openai_authentication.py b/docs/examples/openai/openai_authentication.py deleted file mode 100644 index 39bb2fd..0000000 --- a/docs/examples/openai/openai_authentication.py +++ /dev/null @@ -1,68 +0,0 @@ -""" -Demonstrates how to use the `ChatInterface` widget with authentication for -OpenAI's API. -""" - -import os - -import panel as pn -from openai import AsyncOpenAI - -SYSTEM_KWARGS = dict( - user="System", - respond=False, -) - -pn.extension() - - -def add_key_to_env(key): - if not key.startswith("sk-"): - chat_interface.send("Please enter a valid OpenAI key!", **SYSTEM_KWARGS) - return - - chat_interface.send( - "Your OpenAI key has been set. Feel free to minimize the sidebar.", - **SYSTEM_KWARGS, - ) - chat_interface.disabled = False - - -key_input = pn.widgets.PasswordInput(placeholder="sk-...", name="OpenAI Key") -pn.bind(add_key_to_env, key=key_input, watch=True) - - -async def callback( - contents: str, - user: str, - instance: pn.chat.ChatInterface, -): - if "OPENAI_API_KEY" not in os.environ: - yield "Please first set your OpenAI key in the sidebar!" 
- return - - response = await aclient.chat.completions.create( - model="gpt-3.5-turbo", - messages=[{"role": "user", "content": contents}], - stream=True, - api_key=key_input.value, - ) - message = "" - async for chunk in response: - part = chunk.choices[0].delta.content - if part is not None: - message += part - yield message - - -aclient = AsyncOpenAI() -chat_interface = pn.chat.ChatInterface(callback=callback, disabled=True) -chat_interface.send( - "First enter your OpenAI key in the sidebar, then send a message!", **SYSTEM_KWARGS -) - -pn.template.MaterialTemplate( - title="OpenAI ChatInterface with authentication", - sidebar=[key_input], - main=[chat_interface], -).servable() diff --git a/docs/examples/openai/openai_chat.py b/docs/examples/openai/openai_chat.py deleted file mode 100644 index 75357dd..0000000 --- a/docs/examples/openai/openai_chat.py +++ /dev/null @@ -1,31 +0,0 @@ -""" -Demonstrates how to use the `ChatInterface` to create a chatbot using -OpenAI's API. -""" - -import panel as pn -from openai import OpenAI - -pn.extension() - - -async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): - response = client.chat.completions.create( - model="gpt-3.5-turbo", - messages=[{"role": "user", "content": contents}], - stream=True, - ) - message = "" - for chunk in response: - part = chunk.choices[0].delta.content - if part is not None: - message += part - yield message - - -client = OpenAI() -chat_interface = pn.chat.ChatInterface(callback=callback, callback_user="ChatGPT") -chat_interface.send( - "Send a message to get a reply from ChatGPT!", user="System", respond=False -) -chat_interface.servable() diff --git a/docs/examples/openai/openai_hvplot.py b/docs/examples/openai/openai_hvplot.py deleted file mode 100644 index c1e99f2..0000000 --- a/docs/examples/openai/openai_hvplot.py +++ /dev/null @@ -1,98 +0,0 @@ -""" -Demonstrates how to use the `ChatInterface` to create a **simple chatbot** -that can generate plots of your data using 
[hvPlot](https://hvplot.holoviz.org/). -""" - -import re -from typing import Union - -import pandas as pd -import panel as pn -from openai import AsyncOpenAI -from panel.io.mime_render import exec_with_return - -DATAFRAME_PROMPT = """ - Here are the columns in your DataFrame: {columns}. - Create a plot with hvplot that highlights an interesting - relationship between the columns with hvplot groupby kwarg. -""" - -CODE_REGEX = re.compile(r"```\s?python(.*?)```", re.DOTALL) - - -def _clean(df: pd.DataFrame): - df.columns = [column.strip() for column in df.columns] - df = df.head(100) - return df - - -async def respond_with_openai(contents: Union[pd.DataFrame, str]): - # extract the DataFrame - if isinstance(contents, pd.DataFrame): - global df - df = _clean(contents) - columns = contents.columns - message = DATAFRAME_PROMPT.format(columns=columns) - else: - message = contents - - response = await aclient.chat.completions.create( - model="gpt-3.5-turbo", - messages=[{"role": "user", "content": message}], - temperature=0, - max_tokens=500, - stream=True, - ) - message = "" - async for chunk in response: - part = chunk.choices[0].delta.content - if part is not None: - message += part - yield {"user": "ChatGPT", "object": message} - - -async def respond_with_executor(code: str): - code_block = f"```python\n{code}\n```" - global df - context = {"df": df} - plot = exec_with_return(code=code, global_context=context) - return { - "user": "Executor", - "object": pn.Tabs( - ("Plot", plot), - ("Code", code_block), - ), - } - - -async def callback( - contents: Union[str, pd.DataFrame], - name: str, - instance: pn.chat.ChatInterface, -): - if not isinstance(contents, (str, pd.DataFrame)): - return - - if name == "User": - async for chunk in respond_with_openai(contents): - yield chunk - instance.respond() - elif CODE_REGEX.search(contents): - yield await respond_with_executor(CODE_REGEX.search(contents).group(1)) - - -aclient = AsyncOpenAI() -chat_interface = 
pn.chat.ChatInterface( - widgets=[pn.widgets.FileInput(name="Upload"), pn.widgets.TextInput(name="Message")], - callback=callback, -) -# ruff: noqa: E501 -chat_interface.send( - """Send a message to ChatGPT or upload a small CSV file to get started! - -example.csv -""", - user="System", - respond=False, -) -chat_interface.servable() diff --git a/docs/examples/openai/openai_image_generation.py b/docs/examples/openai/openai_image_generation.py deleted file mode 100644 index 3ac943a..0000000 --- a/docs/examples/openai/openai_image_generation.py +++ /dev/null @@ -1,25 +0,0 @@ -""" -Demonstrates how to use the `ChatInterface` to create images using -OpenAI's [DALL-E API](https://platform.openai.com/docs/guides/images/image-generation). -""" - -import panel as pn -from openai import OpenAI - -pn.extension() - - -def callback(contents: str, user: str, instance: pn.chat.ChatInterface): - response = client.images.generate(prompt=contents, n=1, size="256x256") - image_url = response.data[0].url - return pn.pane.Image(image_url, width=256, height=256) - - -client = OpenAI() -chat_interface = pn.chat.ChatInterface( - callback=callback, callback_user="DALL-E", placeholder_text="Generating..." -) -chat_interface.send( - "Create an image by providing a prompt!", user="System", respond=False -) -chat_interface.servable() diff --git a/docs/examples/openai/openai_with_memory.py b/docs/examples/openai/openai_with_memory.py deleted file mode 100644 index 738366d..0000000 --- a/docs/examples/openai/openai_with_memory.py +++ /dev/null @@ -1,32 +0,0 @@ -""" -Demonstrates how to use the `ChatInterface` to create a chatbot using -OpenAI's with async/await. 
-""" - -import panel as pn -from openai import AsyncOpenAI - -pn.extension() - - -async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): - messages = instance.serialize()[1:] - response = await aclient.chat.completions.create( - model="gpt-3.5-turbo", - messages=messages, - stream=True, - ) - message = "" - async for chunk in response: - part = chunk.choices[0].delta.content - if part is not None: - message += part - yield message - - -aclient = AsyncOpenAI() -chat_interface = pn.chat.ChatInterface(callback=callback, callback_user="ChatGPT") -chat_interface.send( - "Send a message to get a reply from ChatGPT!", user="System", respond=False -) -chat_interface.servable() diff --git a/docs/examples/openai/tool_renderer.json b/docs/examples/openai/tool_renderer.json deleted file mode 100644 index cc8cc4f..0000000 --- a/docs/examples/openai/tool_renderer.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "type": "function", - "function": { - "name": "renderer", - "description": "Renders a plot using a named backend like 'bokeh', 'matplotlib' or 'plotly'", - "parameters": { - "type": "object", - "properties": { - "backend": { - "type": "string", - "enum": ["bokeh", "matplotlib", "bokeh"], - "default": "bokeh", - "description": "The name of the backend to render the plot with. One of 'bokeh', 'matplotlib' or 'plotly'" - } - }, - "required": [ - "backend" - ] - } - } -} diff --git a/docs/kickstart_snippets.md b/docs/kickstart_snippets.md new file mode 100644 index 0000000..434b1ea --- /dev/null +++ b/docs/kickstart_snippets.md @@ -0,0 +1,456 @@ +# Kickstart Snippets +Quickly start using Panel's chat components with popular LLM packages by copying and pasting one of these snippets. All of these examples support: + +- Streaming +- Async +- Memory + + +## Llama Cpp Python + +Demonstrates how to use LlamaCpp with a local, quantized model, like TheBloke's Mistral Instruct v0.2, +with Panel's ChatInterface. 
+ +Highlights: + +- Uses `pn.state.onload` to load the model from Hugging Face Hub when the app is loaded and prevent blocking the app. +- Uses `pn.state.cache` to store the `Llama` instance. +- Uses `serialize` to get chat history from the `ChatInterface`. +- Uses `yield` to continuously concatenate the parts of the response. + +
+ +Source code for llama_cpp_python_.py + +```python +""" +Demonstrates how to use LlamaCpp with a local, quantized model, like TheBloke's Mistral Instruct v0.2, +with Panel's ChatInterface. + +Highlights: + +- Uses `pn.state.onload` to load the model from Hugging Face Hub when the app is loaded and prevent blocking the app. +- Uses `pn.state.cache` to store the `Llama` instance. +- Uses `serialize` to get chat history from the `ChatInterface`. +- Uses `yield` to continuously concatenate the parts of the response. +""" + +import panel as pn +from huggingface_hub import hf_hub_download +from llama_cpp import Llama + +REPO_ID = "TheBloke/Mistral-7B-Instruct-v0.2-code-ft-GGUF" +FILENAME = "mistral-7b-instruct-v0.2-code-ft.Q5_K_S.gguf" + +pn.extension() + + +def load_model(): + model_path = hf_hub_download(repo_id=REPO_ID, filename=FILENAME) + pn.state.cache["llama"] = Llama( + model_path=model_path, + chat_format="mistral-instruct", + verbose=False, + n_gpu_layers=-1, + ) + chat_interface.disabled = False + + +def callback(contents: str, user: str, instance: pn.chat.ChatInterface): + # memory is a list of messages + messages = instance.serialize() + + llama = pn.state.cache["llama"] + response = llama.create_chat_completion_openai_v1(messages=messages, stream=True) + + message = "" + for chunk in response: + part = chunk.choices[0].delta.content + if part is not None: + message += part + yield message + + +chat_interface = pn.chat.ChatInterface( + callback=callback, + callback_user="LlamaCpp", + help_text="Send a message to get a reply from LlamaCpp!", + disabled=True, +) +template = pn.template.FastListTemplate( + title="LlamaCpp Mistral", + header_background="#A0A0A0", + main=[chat_interface], +) +pn.state.onload(load_model) +template.servable() +``` +
+ + +## Mistralai + +Demonstrates how to use MistralAI's Small API with Panel's ChatInterface. + +Highlights: + +- Uses `PasswordInput` to set the API key, or uses the `MISTRAL_API_KEY` environment variable. +- Runs `pn.bind` to update the `MistralAsyncClient` when the `api_key` changes and pn.state.cache to store the client. +- Uses `serialize` to get chat history from the `ChatInterface`. +- Uses `yield` to continuously concatenate the parts of the response. + + + + + +
+ +Source code for mistralai_.py + +```python +""" +Demonstrates how to use MistralAI's Small API with Panel's ChatInterface. + +Highlights: + +- Uses `PasswordInput` to set the API key, or uses the `MISTRAL_API_KEY` environment variable. +- Runs `pn.bind` to update the `MistralAsyncClient` when the `api_key` changes and pn.state.cache to store the client. +- Uses `serialize` to get chat history from the `ChatInterface`. +- Uses `yield` to continuously concatenate the parts of the response. +""" + +import panel as pn +from mistralai.async_client import MistralAsyncClient + +pn.extension() + + +def update_api_key(api_key): + # use api_key_input.value if set, otherwise use MISTRAL_API_KEY + pn.state.cache["aclient"] = ( + MistralAsyncClient(api_key=api_key) if api_key else MistralAsyncClient() + ) + + +async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): + # memory is a list of messages + messages = instance.serialize() + + response = pn.state.cache["aclient"].chat_stream( + model="mistral-small", + messages=messages, + ) + + message = "" + async for chunk in response: + part = chunk.choices[0].delta.content + if part is not None: + message += part + yield message + + +api_key_input = pn.widgets.PasswordInput( + placeholder="Uses $MISTRAL_API_KEY if not set", + sizing_mode="stretch_width", + styles={"color": "black"}, +) +pn.bind(update_api_key, api_key_input, watch=True) +api_key_input.param.trigger("value") + +chat_interface = pn.chat.ChatInterface( + callback=callback, + callback_user="MistralAI", + help_text="Send a message to get a reply from MistralAI!", + callback_exception="verbose", +) +template = pn.template.FastListTemplate( + title="MistralAI Small", + header_background="#FF7000", + main=[chat_interface], + header=[api_key_input], +) +template.servable() +``` +
+ + +## Langchain + +Demonstrates how to use LangChain to wrap OpenAI's GPT-3.5 API with Panel's ChatInterface. + +Highlights: + +- Uses `PasswordInput` to set the API key, or uses the `OPENAI_API_KEY` environment variable. +- Uses `serialize` to get chat history from the `ChatInterface`. +- Uses `yield` to continuously concatenate the parts of the response + + + + + +
+ +Source code for langchain_.py + +```python +""" +Demonstrates how to use LangChain to wrap OpenAI's GPT-3.5 API with Panel's ChatInterface. + +Highlights: + +- Uses `PasswordInput` to set the API key, or uses the `OPENAI_API_KEY` environment variable. +- Uses `serialize` to get chat history from the `ChatInterface`. +- Uses `yield` to continuously concatenate the parts of the response +""" + +from operator import itemgetter + +import panel as pn +from langchain.memory import ConversationTokenBufferMemory +from langchain_core.messages import AIMessage, HumanMessage +from langchain_core.output_parsers import StrOutputParser +from langchain_core.prompts import ChatPromptTemplate +from langchain_core.runnables import RunnableLambda, RunnablePassthrough +from langchain_openai import ChatOpenAI + +pn.extension() + + +async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): + if api_key_input.value: + # use api_key_input.value if set, otherwise use OPENAI_API_KEY + llm.api_key = api_key_input.value + + memory.clear() + for message in instance.serialize(): + if message["role"] == "user": + memory.chat_memory.add_user_message(HumanMessage(**message)) + else: + memory.chat_memory.add_ai_message(AIMessage(**message)) + + response = chain.astream({"user_input": contents}) + + message = "" + async for chunk in response: + message += chunk + yield message + + +llm = ChatOpenAI(model="gpt-3.5-turbo") +memory = ConversationTokenBufferMemory( + return_messages=True, + llm=llm, + memory_key="chat_history", + max_token_limit=8192 - 1024, +) +memory_link = RunnablePassthrough.assign( + chat_history=RunnableLambda(memory.load_memory_variables) + | itemgetter("chat_history") +) +prompt_link = ChatPromptTemplate.from_template( + "{chat_history}\n\nBe a helpful chat bot and answer: {user_input}", +) +output_parser = StrOutputParser() + +chain = ( + {"user_input": RunnablePassthrough()} + | memory_link + | prompt_link + | llm + | output_parser +) + +api_key_input = 
pn.widgets.PasswordInput( + placeholder="sk-... uses $OPENAI_API_KEY if not set", + sizing_mode="stretch_width", + styles={"color": "black"}, +) +chat_interface = pn.chat.ChatInterface( + callback=callback, + callback_user="GPT-3.5", + help_text="Send a message to get a reply from GPT 3.5 Turbo!", + callback_exception="verbose", +) +template = pn.template.FastListTemplate( + title="LangChain OpenAI GPT-3.5", + header_background="#E8B0E6", + main=[chat_interface], + header=[api_key_input], +) +template.servable() +``` +
+ + +## Openai + +Demonstrates how to use OpenAI's GPT-3.5 API with Panel's ChatInterface. + +Highlights: + +- Uses `PasswordInput` to set the API key, or uses the `OPENAI_API_KEY` environment variable. +- Uses `serialize` to get chat history from the `ChatInterface`. +- Uses `yield` to continuously concatenate the parts of the response + + + + + +
+ +Source code for openai_.py + +```python +""" +Demonstrates how to use OpenAI's GPT-3.5 API with Panel's ChatInterface. + +Highlights: + +- Uses `PasswordInput` to set the API key, or uses the `OPENAI_API_KEY` environment variable. +- Uses `serialize` to get chat history from the `ChatInterface`. +- Uses `yield` to continuously concatenate the parts of the response +""" + +import panel as pn +from openai import AsyncOpenAI + +pn.extension() + + +async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): + if api_key_input.value: + # use api_key_input.value if set, otherwise use OPENAI_API_KEY + aclient.api_key = api_key_input.value + + # memory is a list of messages + messages = instance.serialize() + + response = await aclient.chat.completions.create( + model="gpt-3.5-turbo", + messages=messages, + stream=True, + ) + + message = "" + async for chunk in response: + part = chunk.choices[0].delta.content + if part is not None: + message += part + yield message + + +aclient = AsyncOpenAI() +api_key_input = pn.widgets.PasswordInput( + placeholder="sk-... uses $OPENAI_API_KEY if not set", + sizing_mode="stretch_width", + styles={"color": "black"}, +) +chat_interface = pn.chat.ChatInterface( + callback=callback, + callback_user="GPT-3.5", + help_text="Send a message to get a reply from GPT-3.5 Turbo!", +) +template = pn.template.FastListTemplate( + title="OpenAI GPT-3.5", + header_background="#212121", + main=[chat_interface], + header=[api_key_input], +) +template.servable() +``` +
+ + +## Llama Index + +Demonstrates how to use LlamaIndex to wrap OpenAI's GPT-3.5 API with Panel's ChatInterface. + +Highlights: + +- Uses `PasswordInput` to set the API key, or uses the `OPENAI_API_KEY` environment variable. +- Uses `serialize` to get chat history from the `ChatInterface`. +- Uses `yield` to continuously concatenate the parts of the response + +
+ +Source code for llama_index_.py + +```python +""" +Demonstrates how to use LlamaIndex to wrap OpenAI's GPT-3.5 API with Panel's ChatInterface. + +Highlights: + +- Uses `PasswordInput` to set the API key, or uses the `OPENAI_API_KEY` environment variable. +- Uses `serialize` to get chat history from the `ChatInterface`. +- Uses `yield` to continuously concatenate the parts of the response +""" + +import panel as pn +from llama_index.core.agent import ReActAgent +from llama_index.core.llms import ChatMessage +from llama_index.core.tools import FunctionTool +from llama_index.llms.openai import OpenAI + +pn.extension() + + +def multiply(a: int, b: int) -> int: + """Multiple two integers and returns the result integer""" + return a * b + + +async def callback(contents: str, user: str, instance: pn.chat.ChatInterface): + if api_key_input.value: + # use api_key_input.value if set, otherwise use OPENAI_API_KEY + llm.api_key = api_key_input.value + + # memory is a list of messages + messages = [ChatMessage(**message) for message in instance.serialize()] + + response = await llm.astream_chat( + model="gpt-3.5-turbo", + messages=messages, + ) + + async for chunk in response: + message = chunk.message.content + yield str(message) + + +llm = OpenAI(model="gpt-3.5-turbo-0613") + +multiply_tool = FunctionTool.from_defaults(fn=multiply) +agent = ReActAgent.from_tools([multiply_tool], llm=llm, verbose=True) + +api_key_input = pn.widgets.PasswordInput( + placeholder="sk-... uses $OPENAI_API_KEY if not set", + sizing_mode="stretch_width", + styles={"color": "black"}, +) +chat_interface = pn.chat.ChatInterface( + callback=callback, + callback_user="GPT-3.5", + help_text="Send a message to get a reply from GPT 3.5 Turbo!", +) +template = pn.template.FastListTemplate( + title="LlamaIndex OpenAI GPT-3.5", + header_background="#83CBF2", + main=[chat_interface], + header=[api_key_input], +) +template.servable() +``` +
diff --git a/docs/external_resources.md b/docs/linked_resources.md similarity index 94% rename from docs/external_resources.md rename to docs/linked_resources.md index 1074c31..cc58fd9 100644 --- a/docs/external_resources.md +++ b/docs/linked_resources.md @@ -1,4 +1,4 @@ -# External Resources +# Linked Resources Below we list **awesome external resources**. @@ -18,8 +18,6 @@ Authors: [Andrew Huang](https://twitter.com/IAteAnDrew1) | [Sophia Yang](https:/ Link: [Medium](https://sophiamyang.medium.com/how-to-build-your-own-panel-ai-chatbots-ef764f7f114e) -![Getting Started Blog by Andrew and Sophia](assets/thumbnails/basic_chat.png) - ## Autogen [Autogen](https://microsoft.github.io/autogen/) by Microsoft is a framework that enables @@ -86,8 +84,6 @@ Authors: [Andrew Huang](https://twitter.com/IAteAnDrew1) | [Sophia Yang](https:/ Links: [Medium](https://sophiamyang.medium.com/building-ai-chatbots-with-mistral-and-llama2-9c0f5abc296c) -[![ChatBot with Mistral and Llama2](assets/thumbnails/mistral_and_llama.png)](https://sophiamyang.medium.com/building-ai-chatbots-with-mistral-and-llama2-9c0f5abc296c) - ### Talk with Plot Authors: [Andrew Huang](https://twitter.com/IAteAnDrew1) | [Sophia Yang](https://twitter.com/sophiamyang) diff --git a/docs/llama_index.md b/docs/llama_index.md deleted file mode 100644 index daa78ec..0000000 --- a/docs/llama_index.md +++ /dev/null @@ -1,19 +0,0 @@ -# LlamaIndex - -Our examples for [LlamaIndex](https://www.llamaindex.ai/) are available as the -[`panel_chatbot`](https://llamahub.ai/l/llama_packs-panel_chatbot) pack on -LlamaHub. - -Please [give it a ⭐](https://llamahub.ai/l/llama_packs-panel_chatbot) if you like it. - -## Talk to Github - -Demonstrates how to create a chat bot to talk to any GitHub Repository. 
- - - -Source: Talk to Github diff --git a/docs/pyodide/chained_response.html b/docs/pyodide/chained_response.html new file mode 100644 index 0000000..325f848 --- /dev/null +++ b/docs/pyodide/chained_response.html @@ -0,0 +1,186 @@ + + + + + + + + + + + + + + + + + + + Chained Response | Panel Chat Examples + + + + + + + + + + + +
+
+
+ + + + + + \ No newline at end of file diff --git a/docs/pyodide/chained_response.js b/docs/pyodide/chained_response.js new file mode 100644 index 0000000..550bff4 --- /dev/null +++ b/docs/pyodide/chained_response.js @@ -0,0 +1,97 @@ +importScripts("https://cdn.jsdelivr.net/pyodide/v0.25.0/full/pyodide.js"); + +function sendPatch(patch, buffers, msg_id) { + self.postMessage({ + type: 'patch', + patch: patch, + buffers: buffers + }) +} + +async function startApplication() { + console.log("Loading pyodide!"); + self.postMessage({type: 'status', msg: 'Loading pyodide'}) + self.pyodide = await loadPyodide(); + self.pyodide.globals.set("sendPatch", sendPatch); + console.log("Loaded!"); + await self.pyodide.loadPackage("micropip"); + const env_spec = ['https://cdn.holoviz.org/panel/wheels/bokeh-3.4.1-py3-none-any.whl', 'https://cdn.holoviz.org/panel/1.4.2/dist/wheels/panel-1.4.2-py3-none-any.whl', 'pyodide-http==0.2.1'] + for (const pkg of env_spec) { + let pkg_name; + if (pkg.endsWith('.whl')) { + pkg_name = pkg.split('/').slice(-1)[0].split('-')[0] + } else { + pkg_name = pkg + } + self.postMessage({type: 'status', msg: `Installing ${pkg_name}`}) + try { + await self.pyodide.runPythonAsync(` + import micropip + await micropip.install('${pkg}'); + `); + } catch(e) { + console.log(e) + self.postMessage({ + type: 'status', + msg: `Error while installing ${pkg_name}` + }); + } + } + console.log("Packages loaded!"); + self.postMessage({type: 'status', msg: 'Executing code'}) + const code = ` + \nimport asyncio\n\nfrom panel.io.pyodide import init_doc, write_doc\n\ninit_doc()\n\n"""\nDemonstrates how to chain responses from a single message in the callback.\n\nHighlight:\n\n- The \`respond\` parameter in the \`send\` method is used to chain responses.\n- It's also possible to use \`respond\` as a method to chain responses.\n"""\n\nfrom asyncio import sleep\n\nimport panel as pn\n\npn.extension()\n\nPERSON_1 = "Happy User"\nPERSON_2 = "Excited User"\nPERSON_3 = "Passionate 
User"\n\n\nasync def callback(contents: str, user: str, instance: pn.chat.ChatInterface):\n await sleep(2)\n if user == "User":\n instance.send(\n f"Hey, {PERSON_2}! Did you hear the user?",\n user=PERSON_1,\n avatar="\U0001f60a",\n respond=True, # This is the default, but it's here for clarity\n )\n elif user == PERSON_1:\n user_message = instance.objects[-2]\n user_contents = user_message.object\n yield pn.chat.ChatMessage(\n f'Yeah, they said "{user_contents}"! Did you also hear {PERSON_3}?',\n user=PERSON_2,\n avatar="\U0001f604",\n )\n instance.respond()\n elif user == PERSON_2:\n instance.send(\n f"Yup, I heard!",\n user=PERSON_3,\n avatar="\U0001f606",\n respond=False,\n )\n\n\nchat_interface = pn.chat.ChatInterface(\n help_text="Send a message to start the conversation!", callback=callback\n)\nchat_interface.servable()\n\n\nawait write_doc() + ` + + try { + const [docs_json, render_items, root_ids] = await self.pyodide.runPythonAsync(code) + self.postMessage({ + type: 'render', + docs_json: docs_json, + render_items: render_items, + root_ids: root_ids + }) + } catch(e) { + const traceback = `${e}` + const tblines = traceback.split('\n') + self.postMessage({ + type: 'status', + msg: tblines[tblines.length-2] + }); + throw e + } +} + +self.onmessage = async (event) => { + const msg = event.data + if (msg.type === 'rendered') { + self.pyodide.runPythonAsync(` + from panel.io.state import state + from panel.io.pyodide import _link_docs_worker + + _link_docs_worker(state.curdoc, sendPatch, setter='js') + `) + } else if (msg.type === 'patch') { + self.pyodide.globals.set('patch', msg.patch) + self.pyodide.runPythonAsync(` + from panel.io.pyodide import _convert_json_patch + state.curdoc.apply_json_patch(_convert_json_patch(patch), setter='js') + `) + self.postMessage({type: 'idle'}) + } else if (msg.type === 'location') { + self.pyodide.globals.set('location', msg.location) + self.pyodide.runPythonAsync(` + import json + from panel.io.state import state + from 
panel.util import edit_readonly + if state.location: + loc_data = json.loads(location) + with edit_readonly(state.location): + state.location.param.update({ + k: v for k, v in loc_data.items() if k in state.location.param + }) + `) + } +} + +startApplication() diff --git a/docs/pyodide/control_callback_response.html b/docs/pyodide/control_callback_response.html new file mode 100644 index 0000000..408e7bb --- /dev/null +++ b/docs/pyodide/control_callback_response.html @@ -0,0 +1,186 @@ + + + + + + + + + + + + + + + + + + + Control Callback Response | Panel Chat Examples + + + + + + + + + + + +
+
+
+ + + + + + \ No newline at end of file diff --git a/docs/pyodide/control_callback_response.js b/docs/pyodide/control_callback_response.js new file mode 100644 index 0000000..966082c --- /dev/null +++ b/docs/pyodide/control_callback_response.js @@ -0,0 +1,97 @@ +importScripts("https://cdn.jsdelivr.net/pyodide/v0.25.0/full/pyodide.js"); + +function sendPatch(patch, buffers, msg_id) { + self.postMessage({ + type: 'patch', + patch: patch, + buffers: buffers + }) +} + +async function startApplication() { + console.log("Loading pyodide!"); + self.postMessage({type: 'status', msg: 'Loading pyodide'}) + self.pyodide = await loadPyodide(); + self.pyodide.globals.set("sendPatch", sendPatch); + console.log("Loaded!"); + await self.pyodide.loadPackage("micropip"); + const env_spec = ['https://cdn.holoviz.org/panel/wheels/bokeh-3.4.1-py3-none-any.whl', 'https://cdn.holoviz.org/panel/1.4.2/dist/wheels/panel-1.4.2-py3-none-any.whl', 'pyodide-http==0.2.1'] + for (const pkg of env_spec) { + let pkg_name; + if (pkg.endsWith('.whl')) { + pkg_name = pkg.split('/').slice(-1)[0].split('-')[0] + } else { + pkg_name = pkg + } + self.postMessage({type: 'status', msg: `Installing ${pkg_name}`}) + try { + await self.pyodide.runPythonAsync(` + import micropip + await micropip.install('${pkg}'); + `); + } catch(e) { + console.log(e) + self.postMessage({ + type: 'status', + msg: `Error while installing ${pkg_name}` + }); + } + } + console.log("Packages loaded!"); + self.postMessage({type: 'status', msg: 'Executing code'}) + const code = ` + \nimport asyncio\n\nfrom panel.io.pyodide import init_doc, write_doc\n\ninit_doc()\n\n"""\nDemonstrates how to precisely control the callback response.\n\nHighlights:\n\n- Use a placeholder text to display a message while waiting for the response.\n- Use a placeholder threshold to control when the placeholder text is displayed.\n- Use send instead of stream/yield/return to keep the placeholder text while still sending a message, ensuring respond=False to 
avoid a recursive loop.\n- Use yield to continuously update the response message.\n- Use pn.chat.ChatMessage or dict to send a message with a custom user and avatar.\n"""\n\nfrom asyncio import sleep\nfrom random import choice\n\nimport panel as pn\n\npn.extension()\n\n\nasync def callback(contents: str, user: str, instance: pn.chat.ChatInterface):\n await sleep(0.5)\n # use send instead of stream/yield/return to keep the placeholder text\n # while still sending a message; ensure respond=False to avoid a recursive loop\n instance.send(\n "Let me flip the coin for you...", user="Game Master", avatar="\U0001f3b2", respond=False\n )\n await sleep(1)\n\n characters = "/|\\\\_"\n index = 0\n for _ in range(0, 28):\n index = (index + 1) % len(characters)\n # use yield to continuously update the response message\n # use pn.chat.ChatMessage to send a message with a custom user and avatar\n yield pn.chat.ChatMessage("\\r" + characters[index], user="Coin", avatar="\U0001fa99")\n await sleep(0.005)\n\n result = choice(["heads", "tails"])\n if result in contents.lower():\n # equivalently, use a dict instead of a pn.chat.ChatMessage\n yield {"object": f"Woohoo, {result}! You win!", "user": "Coin", "avatar": "\U0001f3b2"}\n else:\n yield {"object": f"Aw, got {result}. 
Try again!", "user": "Coin", "avatar": "\U0001f3b2"}\n\n\nchat_interface = pn.chat.ChatInterface(\n widgets=[\n pn.widgets.RadioButtonGroup(\n options=["Heads!", "Tails!"], button_type="primary", button_style="outline"\n )\n ],\n callback=callback,\n help_text="Select heads or tails, then click send!",\n placeholder_text="Waiting for the result...",\n placeholder_threshold=0.1,\n)\nchat_interface.servable()\n\n\nawait write_doc() + ` + + try { + const [docs_json, render_items, root_ids] = await self.pyodide.runPythonAsync(code) + self.postMessage({ + type: 'render', + docs_json: docs_json, + render_items: render_items, + root_ids: root_ids + }) + } catch(e) { + const traceback = `${e}` + const tblines = traceback.split('\n') + self.postMessage({ + type: 'status', + msg: tblines[tblines.length-2] + }); + throw e + } +} + +self.onmessage = async (event) => { + const msg = event.data + if (msg.type === 'rendered') { + self.pyodide.runPythonAsync(` + from panel.io.state import state + from panel.io.pyodide import _link_docs_worker + + _link_docs_worker(state.curdoc, sendPatch, setter='js') + `) + } else if (msg.type === 'patch') { + self.pyodide.globals.set('patch', msg.patch) + self.pyodide.runPythonAsync(` + from panel.io.pyodide import _convert_json_patch + state.curdoc.apply_json_patch(_convert_json_patch(patch), setter='js') + `) + self.postMessage({type: 'idle'}) + } else if (msg.type === 'location') { + self.pyodide.globals.set('location', msg.location) + self.pyodide.runPythonAsync(` + import json + from panel.io.state import state + from panel.util import edit_readonly + if state.location: + loc_data = json.loads(location) + with edit_readonly(state.location): + state.location.param.update({ + k: v for k, v in loc_data.items() if k in state.location.param + }) + `) + } +} + +startApplication() diff --git a/docs/pyodide/custom_input_widgets.html b/docs/pyodide/custom_input_widgets.html new file mode 100644 index 0000000..7c13ed9 --- /dev/null +++ 
b/docs/pyodide/custom_input_widgets.html @@ -0,0 +1,186 @@ + + + + + + + + + + + + + + + + + + + Custom Input Widgets | Panel Chat Examples + + + + + + + + + + + +
+
+
+ + + + + + \ No newline at end of file diff --git a/docs/pyodide/custom_input_widgets.js b/docs/pyodide/custom_input_widgets.js new file mode 100644 index 0000000..a1729ca --- /dev/null +++ b/docs/pyodide/custom_input_widgets.js @@ -0,0 +1,97 @@ +importScripts("https://cdn.jsdelivr.net/pyodide/v0.25.0/full/pyodide.js"); + +function sendPatch(patch, buffers, msg_id) { + self.postMessage({ + type: 'patch', + patch: patch, + buffers: buffers + }) +} + +async function startApplication() { + console.log("Loading pyodide!"); + self.postMessage({type: 'status', msg: 'Loading pyodide'}) + self.pyodide = await loadPyodide(); + self.pyodide.globals.set("sendPatch", sendPatch); + console.log("Loaded!"); + await self.pyodide.loadPackage("micropip"); + const env_spec = ['https://cdn.holoviz.org/panel/wheels/bokeh-3.4.1-py3-none-any.whl', 'https://cdn.holoviz.org/panel/1.4.2/dist/wheels/panel-1.4.2-py3-none-any.whl', 'pyodide-http==0.2.1'] + for (const pkg of env_spec) { + let pkg_name; + if (pkg.endsWith('.whl')) { + pkg_name = pkg.split('/').slice(-1)[0].split('-')[0] + } else { + pkg_name = pkg + } + self.postMessage({type: 'status', msg: `Installing ${pkg_name}`}) + try { + await self.pyodide.runPythonAsync(` + import micropip + await micropip.install('${pkg}'); + `); + } catch(e) { + console.log(e) + self.postMessage({ + type: 'status', + msg: `Error while installing ${pkg_name}` + }); + } + } + console.log("Packages loaded!"); + self.postMessage({type: 'status', msg: 'Executing code'}) + const code = ` + \nimport asyncio\n\nfrom panel.io.pyodide import init_doc, write_doc\n\ninit_doc()\n\n"""\nDemonstrates how to use the \`ChatInterface\` and custom widgets,\nlike \`ChatAreaInput\` and \`FileInput\`, to create a chatbot that counts\nthe number of lines in a message or file.\n\nHighlights:\n\n- The \`ChatAreaInput\` and \`FileInput\` widgets are used to create a custom\n chatbot that counts the number of lines in a message or file.\n- The \`callback\` function is used to 
count the number of lines in the message\n or file and return the result to the User.\n"""\n\nimport panel as pn\n\npn.extension()\n\n\ndef callback(contents: str, user: str, instance: pn.chat.ChatInterface):\n lines = contents.strip().count("\\n")\n message = f"This snippet has {lines + 1} lines."\n return message\n\n\nchat_input = pn.chat.ChatAreaInput(placeholder="Send a message")\nfile_input = pn.widgets.FileInput(accept=".py")\nchat_interface = pn.chat.ChatInterface(\n callback=callback, widgets=[chat_input, file_input]\n)\nchat_interface.send(\n "Enter a message in the ChatAreaInput below to count how many lines there is, "\n "or upload a Python file to count the number of lines in the file.",\n user="System",\n respond=False,\n)\nchat_interface.servable()\n\n\nawait write_doc() + ` + + try { + const [docs_json, render_items, root_ids] = await self.pyodide.runPythonAsync(code) + self.postMessage({ + type: 'render', + docs_json: docs_json, + render_items: render_items, + root_ids: root_ids + }) + } catch(e) { + const traceback = `${e}` + const tblines = traceback.split('\n') + self.postMessage({ + type: 'status', + msg: tblines[tblines.length-2] + }); + throw e + } +} + +self.onmessage = async (event) => { + const msg = event.data + if (msg.type === 'rendered') { + self.pyodide.runPythonAsync(` + from panel.io.state import state + from panel.io.pyodide import _link_docs_worker + + _link_docs_worker(state.curdoc, sendPatch, setter='js') + `) + } else if (msg.type === 'patch') { + self.pyodide.globals.set('patch', msg.patch) + self.pyodide.runPythonAsync(` + from panel.io.pyodide import _convert_json_patch + state.curdoc.apply_json_patch(_convert_json_patch(patch), setter='js') + `) + self.postMessage({type: 'idle'}) + } else if (msg.type === 'location') { + self.pyodide.globals.set('location', msg.location) + self.pyodide.runPythonAsync(` + import json + from panel.io.state import state + from panel.util import edit_readonly + if state.location: + loc_data = 
json.loads(location) + with edit_readonly(state.location): + state.location.param.update({ + k: v for k, v in loc_data.items() if k in state.location.param + }) + `) + } +} + +startApplication() diff --git a/docs/pyodide/delayed_placeholder.html b/docs/pyodide/delayed_placeholder.html new file mode 100644 index 0000000..64e8346 --- /dev/null +++ b/docs/pyodide/delayed_placeholder.html @@ -0,0 +1,186 @@ + + + + + + + + + + + + + + + + + + + Delayed Placeholder | Panel Chat Examples + + + + + + + + + + + +
+
+
+ + + + + + \ No newline at end of file diff --git a/docs/pyodide/delayed_placeholder.js b/docs/pyodide/delayed_placeholder.js new file mode 100644 index 0000000..f8856b6 --- /dev/null +++ b/docs/pyodide/delayed_placeholder.js @@ -0,0 +1,97 @@ +importScripts("https://cdn.jsdelivr.net/pyodide/v0.25.0/full/pyodide.js"); + +function sendPatch(patch, buffers, msg_id) { + self.postMessage({ + type: 'patch', + patch: patch, + buffers: buffers + }) +} + +async function startApplication() { + console.log("Loading pyodide!"); + self.postMessage({type: 'status', msg: 'Loading pyodide'}) + self.pyodide = await loadPyodide(); + self.pyodide.globals.set("sendPatch", sendPatch); + console.log("Loaded!"); + await self.pyodide.loadPackage("micropip"); + const env_spec = ['https://cdn.holoviz.org/panel/wheels/bokeh-3.4.1-py3-none-any.whl', 'https://cdn.holoviz.org/panel/1.4.2/dist/wheels/panel-1.4.2-py3-none-any.whl', 'pyodide-http==0.2.1'] + for (const pkg of env_spec) { + let pkg_name; + if (pkg.endsWith('.whl')) { + pkg_name = pkg.split('/').slice(-1)[0].split('-')[0] + } else { + pkg_name = pkg + } + self.postMessage({type: 'status', msg: `Installing ${pkg_name}`}) + try { + await self.pyodide.runPythonAsync(` + import micropip + await micropip.install('${pkg}'); + `); + } catch(e) { + console.log(e) + self.postMessage({ + type: 'status', + msg: `Error while installing ${pkg_name}` + }); + } + } + console.log("Packages loaded!"); + self.postMessage({type: 'status', msg: 'Executing code'}) + const code = ` + \nimport asyncio\n\nfrom panel.io.pyodide import init_doc, write_doc\n\ninit_doc()\n\n"""\nDemonstrates how to delay the display of the placeholder.\n\nHighlights:\n\n- The \`placeholder_threshold\` parameter is used to delay the display of the placeholder.\n If the response time is less than the threshold, the placeholder will not be displayed.\n- The \`placeholder_text\` parameter is used to customize the placeholder text.\n"""\n\nfrom asyncio import sleep\n\nimport panel 
as pn\n\npn.extension()\n\n\nasync def callback(contents: str, user: str, instance: pn.chat.ChatInterface):\n try:\n seconds = float(contents)\n if 0 < seconds < 10:\n await sleep(seconds)\n return f"Slept {contents} seconds!"\n else:\n return "Please enter a number between 1 and 9!"\n except ValueError:\n return "Please enter a number!"\n\n\nchat_interface = pn.chat.ChatInterface(\n callback=callback,\n placeholder_threshold=2,\n placeholder_text="Waiting for reply...",\n)\nchat_interface.send(\n "Send a number to make the system sleep between 1 and 9 seconds!",\n user="System",\n respond=False,\n)\nchat_interface.servable()\n\n\nawait write_doc() + ` + + try { + const [docs_json, render_items, root_ids] = await self.pyodide.runPythonAsync(code) + self.postMessage({ + type: 'render', + docs_json: docs_json, + render_items: render_items, + root_ids: root_ids + }) + } catch(e) { + const traceback = `${e}` + const tblines = traceback.split('\n') + self.postMessage({ + type: 'status', + msg: tblines[tblines.length-2] + }); + throw e + } +} + +self.onmessage = async (event) => { + const msg = event.data + if (msg.type === 'rendered') { + self.pyodide.runPythonAsync(` + from panel.io.state import state + from panel.io.pyodide import _link_docs_worker + + _link_docs_worker(state.curdoc, sendPatch, setter='js') + `) + } else if (msg.type === 'patch') { + self.pyodide.globals.set('patch', msg.patch) + self.pyodide.runPythonAsync(` + from panel.io.pyodide import _convert_json_patch + state.curdoc.apply_json_patch(_convert_json_patch(patch), setter='js') + `) + self.postMessage({type: 'idle'}) + } else if (msg.type === 'location') { + self.pyodide.globals.set('location', msg.location) + self.pyodide.runPythonAsync(` + import json + from panel.io.state import state + from panel.util import edit_readonly + if state.location: + loc_data = json.loads(location) + with edit_readonly(state.location): + state.location.param.update({ + k: v for k, v in loc_data.items() if k in 
state.location.param + }) + `) + } +} + +startApplication() diff --git a/docs/pyodide/echo_chat.html b/docs/pyodide/echo_chat.html new file mode 100644 index 0000000..dada610 --- /dev/null +++ b/docs/pyodide/echo_chat.html @@ -0,0 +1,186 @@ + + + + + + + + + + + + + + + + + + + Echo Chat | Panel Chat Examples + + + + + + + + + + + +
+
+
+ + + + + + \ No newline at end of file diff --git a/docs/pyodide/echo_chat.js b/docs/pyodide/echo_chat.js new file mode 100644 index 0000000..397ddb7 --- /dev/null +++ b/docs/pyodide/echo_chat.js @@ -0,0 +1,97 @@ +importScripts("https://cdn.jsdelivr.net/pyodide/v0.25.0/full/pyodide.js"); + +function sendPatch(patch, buffers, msg_id) { + self.postMessage({ + type: 'patch', + patch: patch, + buffers: buffers + }) +} + +async function startApplication() { + console.log("Loading pyodide!"); + self.postMessage({type: 'status', msg: 'Loading pyodide'}) + self.pyodide = await loadPyodide(); + self.pyodide.globals.set("sendPatch", sendPatch); + console.log("Loaded!"); + await self.pyodide.loadPackage("micropip"); + const env_spec = ['https://cdn.holoviz.org/panel/wheels/bokeh-3.4.1-py3-none-any.whl', 'https://cdn.holoviz.org/panel/1.4.2/dist/wheels/panel-1.4.2-py3-none-any.whl', 'pyodide-http==0.2.1'] + for (const pkg of env_spec) { + let pkg_name; + if (pkg.endsWith('.whl')) { + pkg_name = pkg.split('/').slice(-1)[0].split('-')[0] + } else { + pkg_name = pkg + } + self.postMessage({type: 'status', msg: `Installing ${pkg_name}`}) + try { + await self.pyodide.runPythonAsync(` + import micropip + await micropip.install('${pkg}'); + `); + } catch(e) { + console.log(e) + self.postMessage({ + type: 'status', + msg: `Error while installing ${pkg_name}` + }); + } + } + console.log("Packages loaded!"); + self.postMessage({type: 'status', msg: 'Executing code'}) + const code = ` + \nimport asyncio\n\nfrom panel.io.pyodide import init_doc, write_doc\n\ninit_doc()\n\n"""\nDemonstrates how to use the \`ChatInterface\` and a \`callback\` function to respond.\n\nHighlights:\n\n- The \`ChatInterface\` and a \`callback\` function are used to create a\n chatbot that echoes back the message entered by the User.\n- The \`help_text\` parameter is used to provide instructions to the User.\n"""\n\nimport panel as pn\n\npn.extension()\n\n\ndef callback(contents: str, user: str, instance: 
pn.chat.ChatInterface):\n message = f"Echoing {user}: {contents}"\n return message\n\n\nchat_interface = pn.chat.ChatInterface(\n callback=callback,\n help_text="Enter a message in the TextInput below and receive an echo!",\n)\nchat_interface.servable()\n\n\nawait write_doc() + ` + + try { + const [docs_json, render_items, root_ids] = await self.pyodide.runPythonAsync(code) + self.postMessage({ + type: 'render', + docs_json: docs_json, + render_items: render_items, + root_ids: root_ids + }) + } catch(e) { + const traceback = `${e}` + const tblines = traceback.split('\n') + self.postMessage({ + type: 'status', + msg: tblines[tblines.length-2] + }); + throw e + } +} + +self.onmessage = async (event) => { + const msg = event.data + if (msg.type === 'rendered') { + self.pyodide.runPythonAsync(` + from panel.io.state import state + from panel.io.pyodide import _link_docs_worker + + _link_docs_worker(state.curdoc, sendPatch, setter='js') + `) + } else if (msg.type === 'patch') { + self.pyodide.globals.set('patch', msg.patch) + self.pyodide.runPythonAsync(` + from panel.io.pyodide import _convert_json_patch + state.curdoc.apply_json_patch(_convert_json_patch(patch), setter='js') + `) + self.postMessage({type: 'idle'}) + } else if (msg.type === 'location') { + self.pyodide.globals.set('location', msg.location) + self.pyodide.runPythonAsync(` + import json + from panel.io.state import state + from panel.util import edit_readonly + if state.location: + loc_data = json.loads(location) + with edit_readonly(state.location): + state.location.param.update({ + k: v for k, v in loc_data.items() if k in state.location.param + }) + `) + } +} + +startApplication() diff --git a/docs/pyodide/stream_echo_chat.html b/docs/pyodide/stream_echo_chat.html new file mode 100644 index 0000000..f7c780b --- /dev/null +++ b/docs/pyodide/stream_echo_chat.html @@ -0,0 +1,186 @@ + + + + + + + + + + + + + + + + + + + Stream Echo Chat | Panel Chat Examples + + + + + + + + + + + +
+
+
+ + + + + + \ No newline at end of file diff --git a/docs/pyodide/stream_echo_chat.js b/docs/pyodide/stream_echo_chat.js new file mode 100644 index 0000000..87a70f4 --- /dev/null +++ b/docs/pyodide/stream_echo_chat.js @@ -0,0 +1,97 @@ +importScripts("https://cdn.jsdelivr.net/pyodide/v0.25.0/full/pyodide.js"); + +function sendPatch(patch, buffers, msg_id) { + self.postMessage({ + type: 'patch', + patch: patch, + buffers: buffers + }) +} + +async function startApplication() { + console.log("Loading pyodide!"); + self.postMessage({type: 'status', msg: 'Loading pyodide'}) + self.pyodide = await loadPyodide(); + self.pyodide.globals.set("sendPatch", sendPatch); + console.log("Loaded!"); + await self.pyodide.loadPackage("micropip"); + const env_spec = ['https://cdn.holoviz.org/panel/wheels/bokeh-3.4.1-py3-none-any.whl', 'https://cdn.holoviz.org/panel/1.4.2/dist/wheels/panel-1.4.2-py3-none-any.whl', 'pyodide-http==0.2.1'] + for (const pkg of env_spec) { + let pkg_name; + if (pkg.endsWith('.whl')) { + pkg_name = pkg.split('/').slice(-1)[0].split('-')[0] + } else { + pkg_name = pkg + } + self.postMessage({type: 'status', msg: `Installing ${pkg_name}`}) + try { + await self.pyodide.runPythonAsync(` + import micropip + await micropip.install('${pkg}'); + `); + } catch(e) { + console.log(e) + self.postMessage({ + type: 'status', + msg: `Error while installing ${pkg_name}` + }); + } + } + console.log("Packages loaded!"); + self.postMessage({type: 'status', msg: 'Executing code'}) + const code = ` + \nimport asyncio\n\nfrom panel.io.pyodide import init_doc, write_doc\n\ninit_doc()\n\n"""\nDemonstrates how to use the \`ChatInterface\` and a \`callback\` function to\nstream back responses.\n\nThe chatbot Assistant echoes back the message entered by the User in an\n*async streaming* fashion.\n\nHighlights:\n\n- The function is defined as \`async\` and uses \`yield\` to stream back responses.\n- Initialize \`message\` first to gather the characters and then \`yield\` it;\n without 
it, only one letter would be displayed at a time.\n"""\n\n\nfrom asyncio import sleep\n\nimport panel as pn\n\npn.extension()\n\n\nasync def callback(contents: str, user: str, instance: pn.chat.ChatInterface):\n await sleep(1)\n message = ""\n for char in "Echoing User: " + contents:\n await sleep(0.05)\n message += char\n yield message\n\n\nchat_interface = pn.chat.ChatInterface(callback=callback)\nchat_interface.send(\n "Enter a message below and receive an echo!",\n user="System",\n respond=False,\n)\nchat_interface.servable()\n\n\nawait write_doc() + ` + + try { + const [docs_json, render_items, root_ids] = await self.pyodide.runPythonAsync(code) + self.postMessage({ + type: 'render', + docs_json: docs_json, + render_items: render_items, + root_ids: root_ids + }) + } catch(e) { + const traceback = `${e}` + const tblines = traceback.split('\n') + self.postMessage({ + type: 'status', + msg: tblines[tblines.length-2] + }); + throw e + } +} + +self.onmessage = async (event) => { + const msg = event.data + if (msg.type === 'rendered') { + self.pyodide.runPythonAsync(` + from panel.io.state import state + from panel.io.pyodide import _link_docs_worker + + _link_docs_worker(state.curdoc, sendPatch, setter='js') + `) + } else if (msg.type === 'patch') { + self.pyodide.globals.set('patch', msg.patch) + self.pyodide.runPythonAsync(` + from panel.io.pyodide import _convert_json_patch + state.curdoc.apply_json_patch(_convert_json_patch(patch), setter='js') + `) + self.postMessage({type: 'idle'}) + } else if (msg.type === 'location') { + self.pyodide.globals.set('location', msg.location) + self.pyodide.runPythonAsync(` + import json + from panel.io.state import state + from panel.util import edit_readonly + if state.location: + loc_data = json.loads(location) + with edit_readonly(state.location): + state.location.param.update({ + k: v for k, v in loc_data.items() if k in state.location.param + }) + `) + } +} + +startApplication() diff --git 
a/docs/pyodide/styled_slim_interface.html b/docs/pyodide/styled_slim_interface.html new file mode 100644 index 0000000..d551e36 --- /dev/null +++ b/docs/pyodide/styled_slim_interface.html @@ -0,0 +1,358 @@ + + + + + + + + + + + + + + + + + + + Styled Slim Interface | Panel Chat Examples + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + +
+ + +
+
+ + + + + + +
+
+
+
+ +
+
+
+ + + + + +
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/docs/pyodide/styled_slim_interface.js b/docs/pyodide/styled_slim_interface.js new file mode 100644 index 0000000..6f54fbe --- /dev/null +++ b/docs/pyodide/styled_slim_interface.js @@ -0,0 +1,97 @@ +importScripts("https://cdn.jsdelivr.net/pyodide/v0.25.0/full/pyodide.js"); + +function sendPatch(patch, buffers, msg_id) { + self.postMessage({ + type: 'patch', + patch: patch, + buffers: buffers + }) +} + +async function startApplication() { + console.log("Loading pyodide!"); + self.postMessage({type: 'status', msg: 'Loading pyodide'}) + self.pyodide = await loadPyodide(); + self.pyodide.globals.set("sendPatch", sendPatch); + console.log("Loaded!"); + await self.pyodide.loadPackage("micropip"); + const env_spec = ['https://cdn.holoviz.org/panel/wheels/bokeh-3.4.1-py3-none-any.whl', 'https://cdn.holoviz.org/panel/1.4.2/dist/wheels/panel-1.4.2-py3-none-any.whl', 'pyodide-http==0.2.1'] + for (const pkg of env_spec) { + let pkg_name; + if (pkg.endsWith('.whl')) { + pkg_name = pkg.split('/').slice(-1)[0].split('-')[0] + } else { + pkg_name = pkg + } + self.postMessage({type: 'status', msg: `Installing ${pkg_name}`}) + try { + await self.pyodide.runPythonAsync(` + import micropip + await micropip.install('${pkg}'); + `); + } catch(e) { + console.log(e) + self.postMessage({ + type: 'status', + msg: `Error while installing ${pkg_name}` + }); + } + } + console.log("Packages loaded!"); + self.postMessage({type: 'status', msg: 'Executing code'}) + const code = ` + \nimport asyncio\n\nfrom panel.io.pyodide import init_doc, write_doc\n\ninit_doc()\n\n"""\nDemonstrates how to create a slim \`ChatInterface\` that fits in the sidebar.\n\nHighlights:\n\n- The \`ChatInterface\` is placed in the sidebar.\n- Set \`show_*\` parameters to \`False\` to hide the respective buttons.\n- Use \`message_params\` to customize the appearance of each chat messages.\n"""\nimport panel as pn\n\npn.extension()\n\n\nasync def callback(contents: str, 
user: str, instance: pn.chat.ChatInterface):\n message = f"Echoing {user}: {contents}"\n return message\n\n\nchat_interface = pn.chat.ChatInterface(\n callback=callback,\n show_send=False,\n show_rerun=False,\n show_undo=False,\n show_clear=False,\n show_avatar=False,\n show_timestamp=False,\n show_button_name=False,\n show_reaction_icons=False,\n sizing_mode="stretch_width",\n height=700,\n message_params={\n "stylesheets": [\n """\n .message {\n font-size: 1em;\n }\n .name {\n font-size: 0.9em;\n }\n .timestamp {\n font-size: 0.9em;\n }\n """\n ]\n },\n)\n\nmain = """\nWe've put a *slim* \`ChatInterface\` in the sidebar. In the main area you\ncould add the object you are chatting about\n"""\n\npn.template.FastListTemplate(\n main=[main],\n sidebar=[chat_interface],\n sidebar_width=500,\n).servable()\n\n\nawait write_doc() + ` + + try { + const [docs_json, render_items, root_ids] = await self.pyodide.runPythonAsync(code) + self.postMessage({ + type: 'render', + docs_json: docs_json, + render_items: render_items, + root_ids: root_ids + }) + } catch(e) { + const traceback = `${e}` + const tblines = traceback.split('\n') + self.postMessage({ + type: 'status', + msg: tblines[tblines.length-2] + }); + throw e + } +} + +self.onmessage = async (event) => { + const msg = event.data + if (msg.type === 'rendered') { + self.pyodide.runPythonAsync(` + from panel.io.state import state + from panel.io.pyodide import _link_docs_worker + + _link_docs_worker(state.curdoc, sendPatch, setter='js') + `) + } else if (msg.type === 'patch') { + self.pyodide.globals.set('patch', msg.patch) + self.pyodide.runPythonAsync(` + from panel.io.pyodide import _convert_json_patch + state.curdoc.apply_json_patch(_convert_json_patch(patch), setter='js') + `) + self.postMessage({type: 'idle'}) + } else if (msg.type === 'location') { + self.pyodide.globals.set('location', msg.location) + self.pyodide.runPythonAsync(` + import json + from panel.io.state import state + from panel.util import 
edit_readonly + if state.location: + loc_data = json.loads(location) + with edit_readonly(state.location): + state.location.param.update({ + k: v for k, v in loc_data.items() if k in state.location.param + }) + `) + } +} + +startApplication() diff --git a/mkdocs.yml b/mkdocs.yml index 50e32e1..c969a3e 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -55,15 +55,10 @@ plugins: watch: - docs - - panel_chat_examples nav: - Home: index.md - - Basics: basics.md - - Components: components.md - - Features: features.md - - Langchain: langchain.md - - LlamaIndex: llama_index.md - - Mistral: mistral.md - - OpenAI: openai.md - - External: external_resources.md + - Chat Features: chat_features.md + - Kickstart Snippets: kickstart_snippets.md + - Applicable Recipes: applicable_recipes.md + - Linked Resources: linked_resources.md diff --git a/panel_chat_examples/__init__.py b/panel_chat_examples/__init__.py index c61296a..6c8e6b9 100644 --- a/panel_chat_examples/__init__.py +++ b/panel_chat_examples/__init__.py @@ -1,7 +1 @@ -"""Shared functionality for Panel Chat Examples.""" -from panel_chat_examples._environment_widget import EnvironmentWidgetBase - __version__ = "0.0.0" - - -__all__ = ["EnvironmentWidgetBase"] diff --git a/panel_chat_examples/_environment_widget.py b/panel_chat_examples/_environment_widget.py deleted file mode 100644 index de06cb4..0000000 --- a/panel_chat_examples/_environment_widget.py +++ /dev/null @@ -1,144 +0,0 @@ -"""The `EnvironmentWidgetBase` class enables you to manage variable values from a -combination of - -- custom variable values -- environment variables -- user input. - -(listed by order of precedence) - -You can use it as a drop in replacement for `os.environ`. - -For example you might not have the resources to provide an `OPENAI_API_KEY`, -`WEAVIATE_API_KEY` or `LANGCHAIN_API_KEY`. In that case you would would like to ask the -user for it. - -Inherit from this widget to create your own custom `EnvironmentWidget`. 
-""" -# Longer term we should try to get this widget included in Panel -# But for now it can help us deploy applications to Hugging Face etc without paying for -# api keys -import os - -import panel as pn -import param - -WIDGET_MAX_WIDTH = 600 - - -class VariableNotFound(Exception): - def __init__(self, key: str) -> None: - super().__init__(f"The __key '{key}' is not a supported variable!") - - -class EnvironmentWidgetBase(pn.viewable.Viewer): - """The `EnvironmentWidgetBase` class enables you to manage variable values from a - combination of - - - custom variable values - - environment variables - - user input. - - (listed by order of precedence) - - You can use it as a drop in replacement for `os.environ`. - - For example you might not have the resources to provide an `OPENAI_API_KEY`, - `WEAVIATE_API_KEY` or `LANGCHAIN_API_KEY`. In that case you would would like to ask - the user for it. - - >>> class EnvironmentWidget(EnvironmentWidgetBase): - ... OPENAI_API_KEY = param.String(doc="A key for the OpenAI api") - ... WEAVIATE_API_KEY = param.String(doc="A key for the Weaviate api") - ... LANGCHAIN_API_KEY = param.String(doc="A key for the LangChain api")""" - - message_alert: str = param.String( - ( - "**Protect your secrets!** Make sure you trust " - "the publisher of this app before entering your secrets." - ), - doc="""An Alert message to display to the user to make them handle their secrets - securely. 
If not set, then no Alert is displayed""", - ) - - variables_not_set = param.List( - constant=True, doc="A list of the variables with no value" - ) - variables_set = param.List( - constant=True, doc="A list of the variables with a value" - ) - - def __init__(self, **params): - self._variables = self._get_variables() - - for variable in self._variables: - params[variable] = params.get(variable, os.environ.get(variable, "")) - - layout_params = {} - for variable, value in params.items(): - if variable in pn.Column.param: - layout_params[variable] = value - for variable in layout_params: - params.pop(variable) - - super().__init__(**params) - - self._layout = self._create_layout(**layout_params) - - def __panel__(self): - return self._layout - - def _get_variables(self): - return tuple( - key for key in self.param if key not in EnvironmentWidgetBase.param - ) - - def _create_layout(self, **params): - self._update_missing_variables(None) - if not self.variables_not_set: - return pn.Column(height=0, width=0, margin=0, sizing_mode="fixed") - - layout = pn.Column(**params) - if self.message_alert: - alert = pn.pane.Alert( - self.message_alert, - alert_type="danger", - sizing_mode="stretch_width", - ) - layout.append(alert) - - for key in self.variables_not_set: - parameter = self.param[key] - input_widget = pn.widgets.PasswordInput.from_param( - parameter, - max_width=WIDGET_MAX_WIDTH, - sizing_mode="stretch_width", - align="center", - ) - - pn.bind(self._update_missing_variables, input_widget, watch=True) - layout.append(input_widget) - return layout - - def _update_missing_variables(self, _): - missing = [] - not_missing = [] - for key in self._variables: - if not getattr(self, key): - missing.append(key) - else: - not_missing.append(key) - with param.edit_constant(self): - self.variables_not_set = sorted(missing) - self.variables_set = sorted(not_missing) - - def get(self, __key: str, default: str) -> str: - if __key not in self._variables: - raise 
VariableNotFound(key=__key) - return getattr(self, __key) or default - - def __getitem__(self, key): - value = self.get(key, "") - if not value: - raise VariableNotFound(key=key) - return value diff --git a/panel_chat_examples/components/__init__.py b/panel_chat_examples/components/__init__.py deleted file mode 100644 index 042a779..0000000 --- a/panel_chat_examples/components/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -from panel_chat_examples.components.chat_input import ChatInput -from panel_chat_examples.components.status import Status - -__all__ = [ - "ChatInput", - "Status", -] diff --git a/panel_chat_examples/components/chat_input.py b/panel_chat_examples/components/chat_input.py deleted file mode 100644 index aaa3cc8..0000000 --- a/panel_chat_examples/components/chat_input.py +++ /dev/null @@ -1,74 +0,0 @@ -"""The `ChatInput` widget is a combination of a `TextInput` widget and a `Button`. -When the input is submitted the `TextInput` widget is cleared and ready to accept -a new input.""" -import panel as pn -import param - - -class ChatInput(pn.viewable.Viewer): - """The `ChatInput` widget is a combination of a `TextInput` widget and a `Button`. - When the input is submitted the `TextInput` widget is cleared and ready to accept - a new input.""" - - value: str = param.String(doc="""The text value""") - - disabled: bool = param.Boolean( - doc=""" - Whether or not the widget is disabled. 
Default is False""" - ) - max_length = param.Integer( - default=5000, - doc=""" - The max_length of the text input""", - ) - placeholder = param.String( - "Send a message", - doc=""" - An initial placeholder to display in the TextInput""", - ) - - def __init__(self, **params): - layout_params = { - key: value - for key, value in params.items() - if key not in ["value", "placeholder", "disabled", "max_length"] - } - params = { - key: value for key, value in params.items() if key not in layout_params - } - - super().__init__(**params) - - self._text_input = pn.widgets.TextInput( - align="center", - disabled=self.param.disabled, - max_length=self.param.max_length, - name="Message", - placeholder=self.param.placeholder, - sizing_mode="stretch_width", - ) - self._submit_button = pn.widgets.Button( - align="center", - disabled=self.param.disabled, - icon="send", - margin=(18, 5, 10, 0), - name="", - sizing_mode="fixed", - ) - pn.bind( - self._update_value, - value=self._text_input, - event=self._submit_button, - watch=True, - ) - - self._layout = pn.Row( - self._text_input, self._submit_button, align="center", **layout_params - ) - - def __panel__(self): - return self._layout - - def _update_value(self, value, event): - self.value = value or self.value - self._text_input.value = "" diff --git a/panel_chat_examples/components/status.py b/panel_chat_examples/components/status.py deleted file mode 100644 index 2c862c1..0000000 --- a/panel_chat_examples/components/status.py +++ /dev/null @@ -1,152 +0,0 @@ -""" -The `Status` *indicator* can report progress in steps and with -detailed context.""" -from contextlib import contextmanager - -import panel as pn -import param -from panel.widgets.indicators import LoadingSpinner - -COLORS = { - "running": "green", - "complete": "black", - "error": "red", - "next": "lightgray", -} -STATUS_PARAMETERS = ["value", "title", "collapsed", "bgcolor", "color", "steps", "step"] - - -class Status(pn.viewable.Viewer): - """The `Status` 
*indicator* can report progress in steps and with - detailed context.""" - - value = param.Selector( - default="complete", - objects=["complete", "running", "error"], - doc=""" - The current state of the Status indicator. One of 'complete', - 'running' or 'error'""", - ) - title = param.String(doc="The title shown in the card header") - - bgcolor = param.ObjectSelector( - default=LoadingSpinner.param.bgcolor.default, - objects=LoadingSpinner.param.bgcolor.objects, - doc="""The background color of the LoadingSpinner""", - ) - color = param.ObjectSelector( - default="success", - objects=LoadingSpinner.param.color.objects, - doc="""The color of the LoadingSpinner""", - ) - collapsed = param.Boolean( - default=True, doc="""Whether or not the Card is collapsed""" - ) - - steps = param.List(constant=False, doc="""A list of (markdown) string steps""") - step = param.Parameter(constant=True, doc="""The current step""") - - def __init__(self, title: str, **params): - params["title"] = title - params["steps"] = params.get("steps", []) - layout_params = { - key: value for key, value in params.items() if key not in STATUS_PARAMETERS - } - params = { - key: value for key, value in params.items() if key not in layout_params - } - super().__init__(**params) - - self._indicators = { - "running": pn.indicators.LoadingSpinner( - value=True, - color=self.param.color, - bgcolor=self.param.bgcolor, - size=25, - # margin=(15, 0, 0, 0), - ), - "complete": "✔️", - "error": "❌", - } - - self._title_pane = pn.pane.Markdown(self.param.title, align="center") - self._header_row = pn.Row( - pn.panel(self._indicator, sizing_mode="fixed", width=40, align="center"), - self._title_pane, - sizing_mode="stretch_width", - margin=(0, 5), - ) - self._details_pane = pn.pane.HTML( - self._details, margin=(10, 5, 10, 55), sizing_mode="stretch_width" - ) - self._layout = pn.Card( - self._details_pane, - header=self._header_row, - collapsed=self.param.collapsed, - **layout_params, - ) - - def 
__panel__(self): - return self._layout - - @param.depends("value") - def _indicator(self): - return self._indicators[self.value] - - @property - def _step_color(self): - return COLORS[self.value] - - def _step_index(self): - if self.step not in self.steps: - return 0 - return self.steps.index(self.step) - - @param.depends("step", "value") - def _details(self): - steps = self.steps - - if not steps: - return "" - - index = self._step_index() - - html = "" - for step in steps[:index]: - html += f"
{step}
" - step = steps[index] - html += f"
{step}
" - for step in steps[index + 1 :]: - html += f"
{step}
" - - return html - - def progress(self, step: str): - with param.edit_constant(self): - self.value = "running" - if step not in self.steps: - self.steps = self.steps + [step] - self.step = step - - def reset(self): - with param.edit_constant(self): - self.steps = [] - self.value = self.param.value.default - - def start(self): - with param.edit_constant(self): - self.step = None - self.value = "running" - - def complete(self): - self.value = "complete" - - @contextmanager - def report(self): - self.start() - try: - yield self.progress - except Exception: - self.value = "error" - else: - self.complete() diff --git a/pyproject.toml b/pyproject.toml index 8348985..93f68e5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -37,6 +37,7 @@ dependencies = [ "pytest-playwright", "pytest", "ruff", + "panel-chat-examples[all]", ] [tool.hatch.envs.default.scripts] @@ -47,16 +48,14 @@ lint = [ "isort .", "black .", "ruff --fix .", - "mypy panel_chat_examples/", ] lint-check = [ "isort . --check-only", "black --check .", "ruff .", - "mypy panel_chat_examples/", ] docs-serve = "python scripts/generate_gallery.py;mkdocs serve" -docs-build = "python scripts/generate_gallery.py;mkdocs build" +docs-build = "python scripts/postprocess_videos.py;python scripts/generate_gallery.py;mkdocs build;mkdocs serve" panel-serve = "panel serve docs/examples/**/*.py --static-dirs thumbnails=docs/assets/thumbnails --autoreload" panel-convert = "python scripts/convert_apps.py" docs-record = "pytest -s -m ui --screenshot on --video on --headed && python scripts/postprocess_videos.py" @@ -85,21 +84,27 @@ classifiers = [ ] requires-python = ">=3.9" dependencies = [ - "chromadb", - "ctransformers", - "hvplot", - "langchain-experimental", - "langchain", - "matplotlib", - "numexpr", - "openai>1.0.0", - "panel>=1.3.0", - "plotly", - "pypdf", - "tabulate", - "tiktoken", + "panel>=1.4.0rc" +] + +[project.optional-dependencies] +openai = [ + "openai", +] +mistralai = [ "mistralai", - "langchain-openai", +] 
+langchain = [
+    "langchain>=0.0.350",
+]
+llama_index = [
+    "llama_index>=0.10.40",
+]
+llama_cpp_python = [
+    "llama_cpp_python",
+]
+all = [
+    "panel-chat-examples[openai,mistralai,langchain,llama_index,llama_cpp_python]",
 ]
 
 [project.urls]
diff --git a/scripts/convert_apps.py b/scripts/convert_apps.py
index de16998..9e339d6 100644
--- a/scripts/convert_apps.py
+++ b/scripts/convert_apps.py
@@ -7,19 +7,20 @@
 
 NUM_THREADS = 20
 
-DOCS_PATH = Path(__file__).parent.parent / "docs"
+THIS_DIR = Path(__file__).parent
+DOCS_PATH = THIS_DIR.parent / "docs"
+REQUIREMENTS_PATH = (THIS_DIR / "requirements.txt").absolute()
 EXAMPLES_PATH = DOCS_PATH / "examples"
 APP_PATH = DOCS_PATH / "pyodide"
 
 APPS_READY_FOR_PYODIDE = [
-    "basic_chat.py",
-    "basic_streaming_chat.py",
-    "basic_streaming_chat_async.py",
-    # "component_environment_widget.py",  # imports panel_chat_examples
-    "feature_chained_response.py",
-    "feature_delayed_placeholder.py",
-    # "feature_replace_response.py",  # https://github.com/holoviz/panel/issues/5700
-    "feature_slim_interface.py",
+    "chained_response.py",
+    "control_callback_response.py",
+    "custom_input_widgets.py",
+    "delayed_placeholder.py",
+    "echo_chat.py",
+    "stream_echo_chat.py",
+    "styled_slim_interface.py",
 ]
 
 BEFORE = """\
@@ -75,6 +76,8 @@
             "pyodide-worker",
             "--out",
             str(APP_PATH),
+            "--requirements",
+            str(REQUIREMENTS_PATH),
         ]
     )
     _replace(file)
diff --git a/scripts/generate_gallery.py b/scripts/generate_gallery.py
index 882d4a5..3c93710 100644
--- a/scripts/generate_gallery.py
+++ b/scripts/generate_gallery.py
@@ -12,7 +12,6 @@
 INDEX_MD_PATH = DOCS_PATH / "index.md"
 THUMBNAILS_PATH = DOCS_PATH / "assets" / "thumbnails"
 VIDEOS_PATH = DOCS_PATH / "assets" / "videos"
-PREFIX = {"basics": "basic", "components": "component", "features": "feature"}
 
 # ruff: noqa: E501
 VIDEO_URL = "https://github.com/holoviz-topics/panel-chat-examples/assets/42288570/cdb78a39-b98c-44e3-886e-29de6a079bde"
 VIDEO_TAG = """\
@@ -20,6 +19,34 @@
 """
 
 
+DESCRIPTION = {
+    
"chat_features": ( + "Highlights some features of Panel's chat components; " + "they do not require other packages besides Panel." + ), + "applicable_recipes": ( + "Demonstrates how to use Panel's chat components to " + "achieve specific tasks with popular LLM packages." + ), + "kickstart_snippets": ( + "Quickly start using Panel's chat components with popular LLM packages " + "by copying and pasting one of these snippets. All of these examples support:\n\n" + "- Streaming\n" + "- Async\n" + "- Memory\n" + ), +} + +ORDERING = { + "chat_features": [ + "echo_chat.py", + "stream_echo_chat.py", + "custom_input_widgets.py", + "delayed_placeholder.py", + "chained_response.py", + ] +} + def _copy_readme_to_index(): text = README_PATH.read_text() @@ -39,22 +66,28 @@ def run(): _copy_readme_to_index() for folder in sorted(EXAMPLES_PATH.glob("**/"), key=lambda folder: folder.name): - if folder.name in ["examples", "__pycache__"]: + if folder.name not in DESCRIPTION.keys(): continue # Loop through each .py file in the folder docs_file_path = DOCS_PATH / folder.with_suffix(".md").name - text = f"\n# {folder.name.title()}\n" - - for file in sorted(folder.glob("*.py")): - prefix = PREFIX.get(folder.name, folder.name) - + description = DESCRIPTION[folder.name] + text = f"\n# {folder.name.title().replace('_', ' ')}\n{description}\n" + + ordering = ORDERING.get(folder.name, []) + files = sorted( + folder.glob("*.py"), + key=lambda file: ( + ordering.index(file.name) if file.name in ordering else 999 + ), + ) + for file in files: title = ( file.name.replace(".py", "") .replace("_", " ") - .replace(prefix, "") .strip() .title() + .rstrip("_") ) parent_path = Path("..") source_path = parent_path / file.relative_to(EXAMPLES_PATH.parent) @@ -71,9 +104,14 @@ def run(): elif '"""' in line: in_docstring = True - thumbnail = THUMBNAILS_PATH / file.name.replace(".py", ".png") - video = VIDEOS_PATH / file.name.replace(".py", ".mp4") + thumbnail = THUMBNAILS_PATH / file.name.replace(".py", 
".png").replace( + "_.png", ".png" + ) + video = VIDEOS_PATH / file.name.replace(".py", ".mp4").replace( + "_.mp4", ".mp4" + ) + print(video, video.exists()) if video.exists() and thumbnail.exists(): video_str = dedent( f""" diff --git a/scripts/postprocess_videos.py b/scripts/postprocess_videos.py index de80e59..c2dddb4 100644 --- a/scripts/postprocess_videos.py +++ b/scripts/postprocess_videos.py @@ -13,7 +13,7 @@ for webm_path, png_path in zip(webm_paths, png_paths): # examples-...-openai-openai-chat-py-chromium -> openai_chat example_name = "_".join( - webm_path.parent.name.split("examples-")[-1].split("-")[1:-1] + webm_path.parent.name.split("examples-")[-1].split("-")[2:-1] ).replace("_py", "") mp4_path = (VIDEOS_DIR / example_name).with_suffix(".mp4").absolute() print(f"Converting {webm_path} to {mp4_path}") diff --git a/scripts/requirements.txt b/scripts/requirements.txt new file mode 100644 index 0000000..f9ec12d --- /dev/null +++ b/scripts/requirements.txt @@ -0,0 +1 @@ +panel diff --git a/tests/examples/__init__.py b/tests/examples/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/tests/examples/test_environment_widget.py b/tests/examples/test_environment_widget.py deleted file mode 100644 index 5cd1e99..0000000 --- a/tests/examples/test_environment_widget.py +++ /dev/null @@ -1,53 +0,0 @@ -"""Tests the EnvironmentWidgetBase""" -import os - -import param -import pytest - -from panel_chat_examples import EnvironmentWidgetBase -from panel_chat_examples._environment_widget import VariableNotFound - -os.environ["SOME_VALUE"] = "SOME_VALUE" - - -class EnvironmentWidget(EnvironmentWidgetBase): - """An example Environment Widget for testing""" - - SOME_VALUE = param.String(doc="A key for the OpenAI api") - ANOTHER_VALUE = param.String(doc="A key for the Weaviate api") - - -def test_construct_from_os_environ(): - """Test that it takes environment variable in constructor""" - environ = EnvironmentWidget() - assert environ.SOME_VALUE == 
"SOME_VALUE" - assert not environ.ANOTHER_VALUE - assert environ.variables_set == ["SOME_VALUE"] - assert environ.variables_not_set == ["ANOTHER_VALUE"] - - -def test_construct_from_custom_values(): - """Test that custom values takes precedence over environment variables""" - environ = EnvironmentWidget(SOME_VALUE="NEW_VALUE", ANOTHER_VALUE="ANOTHER_VALUE") - assert environ.SOME_VALUE == "NEW_VALUE" - assert environ.ANOTHER_VALUE == "ANOTHER_VALUE" - assert environ.variables_set == [ - "ANOTHER_VALUE", - "SOME_VALUE", - ] # Sorted alphabetically! - assert environ.variables_not_set == [] - - -def test_get(): - """Test the we can .get like os.environ""" - environ = EnvironmentWidget() - assert environ.get("SOME_VALUE", "A") == environ.SOME_VALUE - assert environ.get("ANOTHER_VALUE", "B") == "B" - - -def test_indexing(): - """Test the we can [] like os.environ""" - environ = EnvironmentWidget() - assert environ["SOME_VALUE"] == environ.SOME_VALUE - with pytest.raises(VariableNotFound): - environ["ANOTHER_VALUE"] # pylint: disable=pointless-statement diff --git a/docs/examples/langchain/example.pdf b/tests/ui/example.pdf similarity index 100% rename from docs/examples/langchain/example.pdf rename to tests/ui/example.pdf diff --git a/tests/ui/inputs.py b/tests/ui/inputs.py new file mode 100644 index 0000000..48b4144 --- /dev/null +++ b/tests/ui/inputs.py @@ -0,0 +1,126 @@ +import os +import re +import time +from pathlib import Path + +from playwright.sync_api import Page + +# Please note Playwright .click() does not work with Panel buttons +# Luckily .dispatch_event("click") does + +TIMEOUT = 350 + +EXAMPLE_PDF = str((Path.cwd() / "tests/ui/example.pdf").absolute()) +EXAMPLE_CSV = str((Path.cwd() / "tests/ui/example.csv").absolute()) +PENGUINS_CSV = str((Path.cwd() / "tests/ui/penguins.csv").absolute()) + + +class ChatInterface: + def __init__(self, page: Page): + self.page = page + + def send(self, value): + text_input = self.page.get_by_placeholder("Send a message") + 
self.page.wait_for_timeout(TIMEOUT) + text_input.fill(value) + self.page.wait_for_timeout(TIMEOUT) + text_input.press("Enter") + + def button_click(self, name): + self.page.get_by_role("button", name=name).dispatch_event("click") + self.page.wait_for_timeout(TIMEOUT) + + def send_click(self): + time.sleep(1) + self.button_click(" Send") + + +def default_chat(page: Page): + chat = ChatInterface(page) + chat.send("What is HoloViz Panel in a sentence") + page.wait_for_timeout(TIMEOUT * 10) + + +def custom_input_widgets(page: Page): + chat = ChatInterface(page) + chat.send("How many\nlines\nhere?") + page.get_by_text("This snippet has 3 lines.").inner_text() + page.wait_for_timeout(TIMEOUT * 3) + + +def control_callback_response(page: Page): + chat = ChatInterface(page) + chat.button_click(name="Tails!") + chat.send_click() + page.wait_for_timeout(TIMEOUT * 5) + chat.button_click(name="Heads!") + chat.send_click() + page.wait_for_timeout(TIMEOUT * 5) + + +def chained_response(page: Page): + chat = ChatInterface(page) + chat.send("Hello World") + page.get_by_text("Yup").inner_text() + page.wait_for_timeout(TIMEOUT * 3) + + +def delayed_placeholder(page: Page): + chat = ChatInterface(page) + chat.send("4") + page.get_by_text("Slept 4 seconds!").inner_text() + page.wait_for_timeout(TIMEOUT * 3) + + +def llama_cpp_python(page: Page): + chat = ChatInterface(page) + chat.send("What is HoloViz Panel in a sentence?") + page.wait_for_timeout(TIMEOUT * 10) + + +def langchain_chat_with_pandas(page: Page): + chat = ChatInterface(page) + page.get_by_role("textbox").set_input_files(PENGUINS_CSV) + page.wait_for_timeout(333) + chat.button_click(" Send") + page.get_by_text("For example 'how many species are there?'").wait_for() + chat.send("What are the species?") + page.get_by_text("The species in the dataframe are").wait_for() + page.wait_for_timeout(100) + chat.send("What is the average bill length per species?") + page.get_by_text("The average bill length per species is as 
follows").wait_for() + page.wait_for_timeout(2500) + + +def langchain_chat_with_pdf(page: Page): + chat = ChatInterface(page) + page.locator('input[type="file"]').set_input_files(EXAMPLE_PDF) + page.wait_for_timeout(1000) + chat.send_click() + page.get_by_text("Let's chat about the PDF!").wait_for() + page.wait_for_timeout(1000) + page.get_by_placeholder("Ask questions here!").fill("What assets does the PSF own?") + page.get_by_placeholder("Ask questions here!").press("Enter") + page.wait_for_timeout(10000) + + +def openai_two_bots(page: Page): + chat = ChatInterface(page) + chat.send("HoloViz Panel") + page.wait_for_timeout(15000) + + +def openai_chat_with_hvplot(page: Page): + chat = ChatInterface(page) + chat.send("Plot the population, overlay by country") + page.wait_for_timeout(4000) + chat.send("Create a scatter of population vs life expectancy, overlay by country'") + page.wait_for_timeout(4000) + + +# get all the local functions here +# and put them in a dict +# so we can call them by name like {"openai_two_bots.py": openai_two_bots} +ACTION = {f"{func.__name__}.py": func for func in locals().values() if callable(func)} +ACTION["default_chat"] = default_chat +ZOOM = {} diff --git a/tests/ui/test_all.py b/tests/ui/test_all.py index e948cf6..7cff605 100644 --- a/tests/ui/test_all.py +++ b/tests/ui/test_all.py @@ -8,7 +8,7 @@ from panel.io.server import serve from playwright.sync_api import expect -from .user import ACTION, TIMEOUT, ZOOM +from .inputs import ACTION, TIMEOUT, ZOOM pytestmark = pytest.mark.ui @@ -74,7 +74,7 @@ def test_app(server, app_path, port, page): print(f"\n\nRunning {app_path} on http://localhost:{port}\n\n") # zoom and run should be defined for all examples # even if we don't run the video - run = ACTION[name] + run = ACTION.get(name, ACTION["default_chat"]) zoom = ZOOM.get(name, 1) # We cannot run these tests in pipelines etc. 
as they require models downloaded, diff --git a/tests/ui/user.py b/tests/ui/user.py deleted file mode 100644 index b64d0e0..0000000 --- a/tests/ui/user.py +++ /dev/null @@ -1,285 +0,0 @@ -import os -import re -from pathlib import Path - -from playwright.sync_api import Page - -# Please note Playwright .click() does not work with Panel buttons -# Luckily .dispatch_event("click") does - -TIMEOUT = 350 - -EXAMPLE_PDF = str((Path.cwd() / "docs/examples/langchain/example.pdf").absolute()) -EXAMPLE_CSV = str((Path.cwd() / "tests/ui/example.csv").absolute()) -PENGUINS_CSV = str((Path.cwd() / "tests/ui/penguins.csv").absolute()) - - -class ChatInterface: - def __init__(self, page: Page): - self.page = page - - def send(self, value): - text_input = self.page.get_by_placeholder("Send a message") - self.page.wait_for_timeout(TIMEOUT) - text_input.fill(value) - self.page.wait_for_timeout(TIMEOUT) - text_input.press("Enter") - - def button_click(self, name): - self.page.get_by_role("button", name=name).dispatch_event("click") - self.page.wait_for_timeout(TIMEOUT) - - def send_click(self): - self.button_click(" Send") - - -def basic_chat(page: Page): - chat = ChatInterface(page) - chat.send("Hello World") - page.get_by_text("Echoing User: Hello World").inner_text() - - -def basic_streaming_chat(page: Page): - chat = ChatInterface(page) - chat.send("Hello World") - page.get_by_text("Echoing User: Hello World").inner_text() - - -def basic_streaming_chat_async(page: Page): - chat = ChatInterface(page) - chat.send("Hello World") - page.get_by_text("Echoing User: Hello World").inner_text() - - -def basic_custom_widgets(page: Page): - chat = ChatInterface(page) - chat.send("How many\nlines\nhere?") - page.get_by_text("This snippet has 1 lines.").inner_text() - - -def component_chat_input(page: Page): - text_input = page.get_by_placeholder("Say something") - - text_input.fill("Hello World") - page.wait_for_timeout(TIMEOUT) - text_input.press("Enter") - page.get_by_text("User has sent 
the following prompt: Hello World").wait_for() - - text_input.fill("Could you please repeat that?") - page.wait_for_timeout(TIMEOUT) - text_input.press("Enter") - page.get_by_text( - "User has sent the following prompt: Could you please repeat that?" - ).wait_for() - - -def component_environment_widget(page: Page): - langchain = page.get_by_role("textbox").nth(0) - langchain.fill("some-secret") - langchain.press("Enter") - page.wait_for_timeout(4 * TIMEOUT) - weviate = page.get_by_role("textbox").nth(1) - weviate.fill("another-secret") - weviate.press("Enter") - page.wait_for_timeout(4 * TIMEOUT) - - -def component_status(page: Page): - page.get_by_role("button", name="Run").dispatch_event("click") - page.get_by_text("Validating data...").wait_for() - page.wait_for_timeout(TIMEOUT) - - -def feature_chained_response(page: Page): - chat = ChatInterface(page) - chat.send("Hello World") - page.get_by_text('Yeah! They said "Hello World".').inner_text() - - -def feature_delayed_placeholder(page: Page): - chat = ChatInterface(page) - chat.send("4") - page.get_by_text("Slept 4 seconds!").inner_text() - - -def feature_replace_response(page: Page): - chat = ChatInterface(page) - - chat.button_click(name="Tails!") - chat.send_click() - page.wait_for_timeout(4 * TIMEOUT) - chat.button_click(name="Heads!") - chat.send_click() - page.wait_for_timeout(4 * TIMEOUT) - - -def feature_slim_interface(page: Page): - chat = ChatInterface(page) - chat.send("Hello World") - page.get_by_text("Echoing User: Hello World").inner_text() - - -def langchain_llama_and_mistral(page: Page): - # Needs some finetuning - # Could not get this working as it always starts by downloading models - chat = ChatInterface(page) - chat.send("Please explain what kind of model you are in one sentence") - page.wait_for_timeout(15000) - - -def langchain_chat_pandas_df(page: Page): - chat = ChatInterface(page) - page.get_by_role("textbox").set_input_files(PENGUINS_CSV) - page.wait_for_timeout(333) - 
chat.button_click(" Send") - page.get_by_text("For example 'how many species are there?'").wait_for() - chat.send("What are the species?") - page.get_by_text("The species in the dataframe are").wait_for() - page.wait_for_timeout(100) - chat.send("What is the average bill length per species?") - page.get_by_text("The average bill length per species is as follows").wait_for() - page.wait_for_timeout(2500) - - -def langchain_with_memory(page: Page): - chat = ChatInterface(page) - chat.send("Tell me what HoloViz Panel is in one sentence") - page.wait_for_timeout(4 * TIMEOUT) - chat.send("Tell me more") - page.wait_for_timeout(6 * TIMEOUT) - - -def langchain_math_assistant(page: Page): - chat = ChatInterface(page) - chat.send("What is the square root of 9?") - page.get_by_text("Answer:").wait_for() - page.wait_for_timeout(3000) - - -def langchain_pdf_assistant(page: Page): - chat = ChatInterface(page) - page.get_by_role("textbox").set_input_files(EXAMPLE_PDF) - page.wait_for_timeout(1000) - chat.send_click() - page.get_by_text("Let's chat about the PDF!").wait_for() - page.wait_for_timeout(500) - # chat.send("What assets does the PSF own?") - page.get_by_placeholder("Ask questions here!").fill("What assets does the PSF own?") - page.get_by_placeholder("Ask questions here!").press("Enter") - page.wait_for_timeout(10000) - - -def langchain_lcel(page: Page): - chat = ChatInterface(page) - chat.send("Python") - page.wait_for_timeout(5000) - - -def langchain_streaming_lcel_with_memory(page: Page): - chat = ChatInterface(page) - chat.send("Remember this number: 8. 
Be concise.") - page.wait_for_timeout(10000) - chat.send("What number did I just ask you to remember?") - page.wait_for_timeout(10000) - - -def mistral_and_llama(page: Page): - chat = ChatInterface(page) - chat.send("What do you think about HoloViz in a single sentence?") - page.wait_for_timeout(15000) - - -def mistral_chat(page: Page): - chat = ChatInterface(page) - chat.send("What is HoloViz Panel in one sentence") - page.wait_for_timeout(4000) - - -def mistral_with_memory(page: Page): - chat = ChatInterface(page) - chat.send("Tell me what HoloViz Panel is in one sentence") - page.wait_for_timeout(3000) - chat.send("Tell me more") - page.wait_for_timeout(3000) - - -def mistral_api_chat(page: Page): - chat = ChatInterface(page) - chat.send("What is HoloViz Panel in one sentence") - page.wait_for_timeout(4000) - - -def openai_async_chat(page: Page): - chat = ChatInterface(page) - chat.send("What is HoloViz Panel in one sentence") - page.wait_for_timeout(4000) - - -def openai_authentication(page: Page): - chat = ChatInterface(page) - page.get_by_placeholder("sk-...").fill(os.environ["OPENAI_API_KEY"]) - page.get_by_placeholder("sk-...").press("Enter") - page.get_by_text( - "Your OpenAI key has been set. Feel free to minimize the sidebar." 
- ).wait_for() - page.wait_for_timeout(1000) - chat.send("Explain who you are in one sentence") - page.wait_for_timeout(3000) - - -def openai_chat(page: Page): - chat = ChatInterface(page) - chat.send("What is HoloViz Panel") - page.locator("div").filter(has_text=re.compile(r"^ChatGPT$")).first.dispatch_event( - "click" - ) - page.wait_for_timeout(2000) - - -def openai_with_memory(page: Page): - chat = ChatInterface(page) - chat.send("Remember this number 8") - page.locator("div").filter(has_text=re.compile(r"^ChatGPT$")).first.dispatch_event( - "click" - ) - page.wait_for_timeout(1500) - chat.send("What number did I just ask you to remember?") - page.wait_for_timeout(1000) - - -def openai_chat_with_hvplot(page: Page): - chat = ChatInterface(page) - chat.send("Plot the prices using distinct shades of pink") - page.wait_for_timeout(4000) - chat.send("Create an ohlc plot. Give it the title 'OHLC Plot'") - page.wait_for_timeout(4000) - - -def openai_hvplot(page: Page): - chat = ChatInterface(page) - page.get_by_role("textbox").set_input_files(EXAMPLE_CSV) - page.wait_for_timeout(1000) - chat.button_click(" Send") - page.get_by_role("combobox").select_option("clothing") - page.wait_for_timeout(1000) - - -def openai_image_generation(page: Page): - chat = ChatInterface(page) - chat.send("Two people on a beach in the style of Carl Barks") - page.get_by_text("DALL-E").wait_for() - page.locator("img").nth(1).wait_for() - page.wait_for_timeout(1000) - - -def openai_two_bots(page: Page): - chat = ChatInterface(page) - chat.send("HoloViz Panel") - page.wait_for_timeout(10000) - - -# get all the local functions here -# and put them in a dict -# so we can call them by name like {"openai_two_bots.py": openai_two_bots} -ACTION = {f"{func.__name__}.py": func for func in locals().values() if callable(func)} -ZOOM = {}