Skip to content
This repository has been archived by the owner on Jan 12, 2025. It is now read-only.

Commit

Permalink
fix threads parents
Browse files Browse the repository at this point in the history
  • Loading branch information
hughcrt committed Jan 11, 2025
1 parent 2bcae30 commit 324ffc7
Show file tree
Hide file tree
Showing 4 changed files with 37 additions and 11 deletions.
2 changes: 0 additions & 2 deletions examples/functions.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,7 @@
from dotenv import load_dotenv
import openai
import os
import lunary

load_dotenv()

openai.api_key = os.environ.get("OPENAI_API_KEY")

Expand Down
19 changes: 11 additions & 8 deletions examples/threads.py
Original file line number Diff line number Diff line change
@@ -1,21 +1,24 @@
import lunary
from dotenv import load_dotenv
from openai import OpenAI
import time

load_dotenv()
client = OpenAI()
lunary.monitor(client)

thread = lunary.open_thread()

thread.track_message({
"role": "user",
"content": "Hello!"
})
message = { "role": "user", "content": "Hello!" }
msg_id = thread.track_message(message)
chat_completion = client.chat.completions.create(
messages=[message],
model="gpt-4o",
parent=msg_id
)

time.sleep(0.5)

thread.track_message({
"role": "assistant",
"content": "How can I help you?"
"content": chat_completion.choices[0].message.content
})

time.sleep(0.5)
Expand Down
2 changes: 1 addition & 1 deletion lunary/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -1127,7 +1127,6 @@ def on_llm_end(
run_id = run_manager.end_run(run_id)

token_usage = (response.llm_output or {}).get("token_usage", {})

parsed_output: Any = [
(
_parse_lc_message(generation.message)
Expand All @@ -1137,6 +1136,7 @@ def on_llm_end(
for generation in response.generations[0]
]

# if it's an array of 1, just parse the first element
if len(parsed_output) == 1:
parsed_output = parsed_output[0]

Expand Down
25 changes: 25 additions & 0 deletions lunary/parent.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
# used for reconciling messages with Langchain: https://lunary.ai/docs/features/chats#reconciliate-with-llm-calls--agents
from contextvars import ContextVar

parent_ctx = ContextVar("parent_ctx", default=None)

class ParentContextManager:
    """Scope a parent message id for runs started inside the block.

    Instantiation immediately publishes ``{"message_id": ..., "retrieved": False}``
    to ``parent_ctx`` (kept eager for backward compatibility with callers that
    don't use a ``with`` statement); exiting restores whatever value the
    context variable held before, so nested scopes compose correctly.
    """

    def __init__(self, message_id: str):
        # Keep the Token returned by set() so __exit__ can restore the
        # previous value instead of unconditionally clearing it.
        self._token = parent_ctx.set({"message_id": message_id, "retrieved": False})

    def __enter__(self):
        # Return self so `with parent(id) as p:` binds the manager
        # (the previous implementation returned None).
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        # reset() restores the value the var had before __init__'s set(),
        # which supports nested parent() blocks rather than clobbering
        # the outer scope with None.
        parent_ctx.reset(self._token)


def parent(id: str) -> ParentContextManager:
    """Create a parent-message scope: ``with lunary.parent(msg_id): ...``."""
    manager = ParentContextManager(id)
    return manager

def get_parent():
    """Return the pending parent message id, or ``None``.

    One-shot read: the first call after a parent context is set returns the
    message id and flips ``retrieved`` to True, so subsequent calls within
    the same context return ``None`` instead of reattaching to the same
    parent message.
    """
    parent = parent_ctx.get()
    # Truthiness check instead of the non-idiomatic `== False` comparison.
    if parent and not parent.get("retrieved", False):
        # Mark as consumed before handing the id out.
        parent_ctx.set({"message_id": parent["message_id"], "retrieved": True})
        return parent.get("message_id", None)
    return None

0 comments on commit 324ffc7

Please sign in to comment.