Skip to content

Commit

Permalink
Save configuration to Solid pod
Browse files Browse the repository at this point in the history
  • Loading branch information
Vidminas committed Apr 21, 2024
1 parent f60e972 commit 70bb7e0
Show file tree
Hide file tree
Showing 3 changed files with 132 additions and 43 deletions.
75 changes: 75 additions & 0 deletions src/chat_app/config_utils.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
from rdflib import Graph, URIRef, Namespace, FOAF

from chat_app.solid_pod_utils import SolidPodUtils

ldp_ns = Namespace("http://www.w3.org/ns/ldp#")


def read_config(
    solid_utils: SolidPodUtils,
    default_retrieval_service: str,
    default_llm_service: str,
    default_docs_location: str,
):
    """Load chatbot configuration from the user's Solid pod.

    Reads the config document at ``solid_utils.config_uri`` and looks up one
    value per setting, each anchored at a fragment of that document
    (``#retrieval_service``, ``#llm_service``, ``#docs_location``). Any
    setting missing from the pod keeps the caller-supplied default.

    Returns:
        Tuple of (retrieval_service, llm_service, docs_location); each entry
        is either the stored RDF value or the corresponding default.
    """
    graph = solid_utils.read_solid_item(solid_utils.config_uri)

    def _stored_or(fragment, predicate, fallback):
        # graph.value returns None when no matching triple exists.
        stored = graph.value(
            subject=URIRef(f"{solid_utils.config_uri}#{fragment}"),
            predicate=predicate,
        )
        return fallback if stored is None else stored

    retrieval_service = _stored_or(
        "retrieval_service", FOAF.accountServiceHomepage, default_retrieval_service
    )
    llm_service = _stored_or(
        "llm_service", FOAF.accountServiceHomepage, default_llm_service
    )
    docs_location = _stored_or(
        "docs_location", ldp_ns.Resource, default_docs_location
    )
    return retrieval_service, llm_service, docs_location


def write_config(
    solid_utils: SolidPodUtils,
    retrieval_service: str,
    llm_service: str,
    docs_location: str,
):
    """Persist chatbot configuration to the user's Solid pod.

    Each non-empty value is stored as a single triple anchored at a fragment
    of the config document (``#retrieval_service``, ``#llm_service``,
    ``#docs_location``). Any previously stored value for the same
    subject/predicate is deleted first, so the pod keeps at most one value
    per setting. Empty/falsy values are skipped entirely.

    Args:
        solid_utils: authenticated pod helper exposing ``config_uri`` and
            ``update_solid_item``.
        retrieval_service: retrieval provider URL, or "" to leave unchanged.
        llm_service: LLM provider URL, or "" to leave unchanged.
        docs_location: documents location URL, or "" to leave unchanged.
    """
    # (fragment, predicate, value) for every configurable setting.
    settings = (
        ("retrieval_service", FOAF.accountServiceHomepage, retrieval_service),
        ("llm_service", FOAF.accountServiceHomepage, llm_service),
        ("docs_location", ldp_ns.Resource, docs_location),
    )

    delete_wheres = []
    inserts = []
    for fragment, predicate, value in settings:
        if not value:
            continue
        node = URIRef(f"{solid_utils.config_uri}#{fragment}")
        delete_wheres.append(f"{node.n3()} {predicate.n3()} ?uri")
        inserts.append(f"{node.n3()} {predicate.n3()} <{value}>")

    # Guard against empty pattern lists: when no setting was provided the
    # original code sent "DELETE WHERE {  }" / "INSERT DATA {  }", which is
    # invalid SPARQL and would be rejected by the pod server.
    if delete_wheres:
        delete_patterns = " .\n".join(delete_wheres)
        solid_utils.update_solid_item(
            solid_utils.config_uri, f"DELETE WHERE {{ {delete_patterns} }}"
        )
    if inserts:
        insert_patterns = " .\n".join(inserts)
        solid_utils.update_solid_item(
            solid_utils.config_uri, f"INSERT DATA {{ {insert_patterns} }}"
        )
92 changes: 52 additions & 40 deletions src/chat_app/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@

from chat_app.solid_message_history import SolidChatMessageHistory
from chat_app.solid_pod_utils import SolidPodUtils
from chat_app.config_utils import read_config, write_config
from chat_app.apis.base_api import BaseRetrievalServiceAPI, BaseLLMAPI
from chat_app.apis.demo_api import DemoEmbeddingsAPI, DemoLLMAPI
from chat_app.apis.openai_api import OpenAIEmbeddingsAPI, OpenAILLMAPI
Expand Down Expand Up @@ -65,37 +66,40 @@ def show_login_sidebar():


def show_config(solid_utils: SolidPodUtils):
config = solid_utils.read_solid_item(solid_utils.config_uri)
if len(config):
return

with st.form("reuse-config-form"):
st.title("Reuse an existing configuration")
st.text_input("Configuration file URI")
st.form_submit_button("Import configuration")
default_retrieval_service, default_llm_service, default_docs_location = read_config(
solid_utils,
default_retrieval_service="http://localhost:5000/",
default_llm_service="http://localhost:5000/",
default_docs_location="",
)

# with st.form("reuse-config-form"):
# st.title("Reuse an existing configuration")
# st.text_input("Configuration file URI")
# st.form_submit_button("Import configuration")

with st.container(border=True):
st.title("Or configure your chatbot service")
st.title("Configure your chatbot service")
col1, col2 = st.columns((1, 2))

embeddings_params = {
"Demo": {
"label": "Retrieval service provider",
"value": "http://localhost:5000/",
},
"OpenAI": {
"label": "Secret Key",
"placeholder": "sk-...",
"key": "openai-embeddings-sk",
"value": default_retrieval_service,
},
# "OpenAI": {
# "label": "Secret Key",
# "placeholder": "sk-...",
# "key": "openai-embeddings-sk",
# },
}
llm_params = {
"Demo": {"label": "LLM provider", "value": "http://localhost:5000/"},
"OpenAI": {
"label": "Secret Key",
"placeholder": "sk-...",
"key": "openai-llm-sk",
},
"Demo": {"label": "LLM provider", "value": default_llm_service},
# "OpenAI": {
# "label": "Secret Key",
# "placeholder": "sk-...",
# "key": "openai-llm-sk",
# },
}

embeddings_api = col1.selectbox(
Expand All @@ -108,7 +112,8 @@ def show_config(solid_utils: SolidPodUtils):
llm_param = col2.text_input(**llm_params[llm_api])

documents_location = st.text_input(
"Documents location URL for retrieval (optional)"
"Documents location URL for retrieval (optional)",
value=default_docs_location,
)

submitted = st.form_submit_button("Confirm")
Expand All @@ -124,7 +129,7 @@ def show_config(solid_utils: SolidPodUtils):
retrieval_service: BaseRetrievalServiceAPI = retrieval_providers[
embeddings_api
](solid_utils, embeddings_param)
llm_provider: BaseLLMAPI = llm_providers[llm_api](
llm_service: BaseLLMAPI = llm_providers[llm_api](
solid_utils, llm_param
)

Expand All @@ -133,22 +138,19 @@ def show_config(solid_utils: SolidPodUtils):

st.session_state["provider_config"] = (
retrieval_service,
llm_provider,
llm_service,
documents_location,
)
write_config(
solid_utils,
embeddings_param,
llm_param,
documents_location,
)
st.rerun()


def show_chats_sidebar(solid_utils: SolidPodUtils):
st.sidebar.markdown(f"Logged in as <{solid_utils.webid}>")

def logout():
# TODO: this should also revoke the token, but not implemented yet
del st.session_state["solid_token"]
st.session_state.pop("llm_options", None)
st.session_state.pop("msg_history", None)

st.sidebar.button("Log Out", on_click=logout)

threads = solid_utils.list_container_items(solid_utils.workspace_uri)
if "msg_history" not in st.session_state:
st.session_state["msg_history"] = SolidChatMessageHistory(
Expand Down Expand Up @@ -221,17 +223,27 @@ def main():
return

solid_utils = SolidPodUtils(st.session_state["solid_token"])
show_chats_sidebar(solid_utils)
st.sidebar.markdown(f"Logged in as <{solid_utils.webid}>")

def logout():
# TODO: this should also revoke the token, but not implemented yet
del st.session_state["solid_token"]
st.session_state.pop("provider_config", None)
st.session_state.pop("llm_options", None)
st.session_state.pop("msg_history", None)

st.sidebar.button("Log Out", on_click=logout)

if "provider_config" not in st.session_state:
show_config(solid_utils)
return
retrieval_service: BaseRetrievalServiceAPI = st.session_state["provider_config"][0]
llm_provider: BaseLLMAPI = st.session_state["provider_config"][1]
llm_service: BaseLLMAPI = st.session_state["provider_config"][1]
documents_location: str = st.session_state["provider_config"][2]
show_chats_sidebar(solid_utils)

st.sidebar.markdown(str(retrieval_service))
st.sidebar.markdown(str(llm_provider))
st.sidebar.markdown(str(llm_service))

def reset_config():
del st.session_state["provider_config"]
Expand All @@ -245,7 +257,7 @@ def reset_config():
)

if "llm_options" not in st.session_state:
st.session_state["llm_options"] = llm_provider.get_llm_models()
st.session_state["llm_options"] = llm_service.get_llm_models()
selected_llm = st.sidebar.radio("LLM", st.session_state["llm_options"])

if "msg_history" not in st.session_state:
Expand All @@ -269,7 +281,7 @@ def reset_config():

if len(history.messages) > 1:
with st.spinner("LLM is thinking..."):
condensed_prompt = llm_provider.condense_prompt_with_chat_history(
condensed_prompt = llm_service.condense_prompt_with_chat_history(
selected_llm, history.messages
)
with st.chat_message("ai"):
Expand All @@ -291,7 +303,7 @@ def reset_config():
relevant_documents = None

with st.spinner("LLM is thinking..."):
ai_msg = llm_provider.chat_completion(
ai_msg = llm_service.chat_completion(
selected_llm, condensed_prompt, relevant_documents
)
with st.chat_message("ai"):
Expand Down
8 changes: 5 additions & 3 deletions src/chat_app/solid_pod_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -129,9 +129,11 @@ def create_solid_item(self, uri: str) -> None:
headers={
"Accept": "text/turtle",
"If-None-Match": "*",
"Link": f'{ldp_ns.BasicContainer.n3()}; rel="type"'
if uri.endswith("/")
else f'{ldp_ns.Resource.n3()}; rel="type"',
"Link": (
f'{ldp_ns.BasicContainer.n3()}; rel="type"'
if uri.endswith("/")
else f'{ldp_ns.Resource.n3()}; rel="type"'
),
"Slug": get_item_name(uri),
"Content-Type": "text/turtle",
**self.solid_auth.get_auth_headers(uri, "PUT"),
Expand Down

0 comments on commit 70bb7e0

Please sign in to comment.