Skip to content
This repository has been archived by the owner on Jan 12, 2025. It is now read-only.

feat: IBM watsonx integration #74

Merged
merged 6 commits into from
Jan 12, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
27 changes: 27 additions & 0 deletions examples/anthropic/async-streaming.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
import os
import asyncio
from anthropic import AsyncAnthropic
import lunary

client = AsyncAnthropic(
    api_key=os.environ.get("ANTHROPIC_API_KEY"), # This is the default and can be omitted
)
lunary.monitor(client)  # instrument the client so calls are reported to Lunary


async def main() -> None:
    """Stream a response from Claude and consume each event as it arrives."""
    # BUG FIX: the original example omitted stream=True (so create() returned a
    # plain Message, not a stream) and iterated with a sync `for` inside an
    # async function. Request a stream and drain it with `async for`, matching
    # the sync streaming example.
    stream = await client.messages.create(
        max_tokens=1024,
        messages=[
            {
                "role": "user",
                "content": "Hello, Claude",
            }
        ],
        model="claude-3-opus-20240229",
        stream=True,
    )
    async for event in stream:
        pass


asyncio.run(main())
26 changes: 26 additions & 0 deletions examples/anthropic/async.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
import os
import asyncio
from anthropic import AsyncAnthropic
import lunary

# Async Anthropic client; the key is read from ANTHROPIC_API_KEY.
client = AsyncAnthropic(
    api_key=os.environ.get("ANTHROPIC_API_KEY"), # This is the default and can be omitted
)
lunary.monitor(client)  # attach Lunary monitoring to the client


async def main() -> None:
    """Send one message to Claude and print the response content."""
    message = await client.messages.create(
        model="claude-3-opus-20240229",
        max_tokens=1024,
        messages=[{"role": "user", "content": "Hello, Claude"}],
    )
    print(message.content)


asyncio.run(main())
22 changes: 22 additions & 0 deletions examples/anthropic/basic.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
import os
from anthropic import Anthropic
import lunary

client = Anthropic(
    api_key=os.environ.get("ANTHROPIC_API_KEY"),
)
lunary.monitor(client)  # attach Lunary monitoring to the client


message = client.messages.create(
    max_tokens=1024,
    messages=[
        {
            "role": "user",
            "content": "Hello, Claude",
        }
    ],
    model="claude-3-opus-20240229",
)

# BUG FIX: the original ended with the truncated statement `print(message.ro` —
# a syntax error. Print the message content, as the sibling examples do.
print(message.content)
26 changes: 26 additions & 0 deletions examples/anthropic/streaming.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
import os
from anthropic import Anthropic
import lunary

# Sync Anthropic client; the key is read from ANTHROPIC_API_KEY.
client = Anthropic(api_key=os.environ.get("ANTHROPIC_API_KEY"))
lunary.monitor(client)  # attach Lunary monitoring to the client


# Request a streamed response so events arrive incrementally.
stream = client.messages.create(
    model="claude-3-opus-20240229",
    max_tokens=1024,
    stream=True,
    messages=[{"role": "user", "content": "Hello, Claude"}],
)

# Drain the stream; this example does nothing with the individual events.
for event in stream:
    pass


86 changes: 86 additions & 0 deletions examples/anthropic/tool-call.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
from anthropic import Anthropic
import lunary

client = Anthropic()
lunary.monitor(client)  # attach Lunary monitoring to the client

MODEL = "claude-3-5-sonnet-20241022"
QUESTION = "What's the weather like in San Francisco?"
TOOL_USE_ID = "toolu_01A09q90qw90lq917835lq9"

# Round 1: offer the model a weather tool that takes only a location.
basic_weather_tool = {
    "name": "get_weather",
    "description": "Get the current weather in a given location",
    "input_schema": {
        "type": "object",
        "properties": {
            "location": {
                "type": "string",
                "description": "The city and state, e.g. San Francisco, CA",
            }
        },
        "required": ["location"],
    },
}

response = client.messages.create(
    model=MODEL,
    max_tokens=1024,
    tools=[basic_weather_tool],
    messages=[{"role": "user", "content": QUESTION}],
)

# Round 2: richer schema (adds a temperature unit) plus a hand-written
# transcript that replays the model's tool call and feeds back its result.
extended_weather_tool = {
    "name": "get_weather",
    "description": "Get the current weather in a given location",
    "input_schema": {
        "type": "object",
        "properties": {
            "location": {
                "type": "string",
                "description": "The city and state, e.g. San Francisco, CA",
            },
            "unit": {
                "type": "string",
                "enum": ["celsius", "fahrenheit"],
                "description": "The unit of temperature, either 'celsius' or 'fahrenheit'",
            },
        },
        "required": ["location"],
    },
}

# The assistant turn: the model's reasoning followed by its tool invocation.
assistant_turn = {
    "role": "assistant",
    "content": [
        {
            "type": "text",
            "text": "<thinking>I need to use get_weather, and the user wants SF, which is likely San Francisco, CA.</thinking>",
        },
        {
            "type": "tool_use",
            "id": TOOL_USE_ID,
            "name": "get_weather",
            "input": {"location": "San Francisco, CA", "unit": "celsius"},
        },
    ],
}

# The tool result is sent back as a user turn referencing the tool_use id.
tool_result_turn = {
    "role": "user",
    "content": [
        {
            "type": "tool_result",
            "tool_use_id": TOOL_USE_ID,  # from the API response
            "content": "65 degrees",  # from running your tool
        }
    ],
}

response = client.messages.create(
    model=MODEL,
    max_tokens=1024,
    tools=[extended_weather_tool],
    messages=[
        {"role": "user", "content": QUESTION},
        assistant_turn,
        tool_result_turn,
    ],
)

print(response)
34 changes: 0 additions & 34 deletions examples/evals.py

This file was deleted.

2 changes: 0 additions & 2 deletions examples/functions.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,7 @@
from dotenv import load_dotenv
import openai
import os
import lunary

load_dotenv()

openai.api_key = os.environ.get("OPENAI_API_KEY")

Expand Down
23 changes: 23 additions & 0 deletions examples/ibm/async.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
import os
import asyncio

from ibm_watsonx_ai import Credentials
from ibm_watsonx_ai.foundation_models import ModelInference
import lunary

# Watsonx.ai chat model; API key and project id come from the environment.
credentials = Credentials(
    api_key=os.environ.get("IBM_API_KEY"),
    url="https://us-south.ml.cloud.ibm.com",
)
model = ModelInference(
    model_id="meta-llama/llama-3-1-8b-instruct",
    credentials=credentials,
    project_id=os.environ.get("IBM_PROJECT_ID"),
)
lunary.monitor(model)  # attach Lunary monitoring to the model


async def main():
    """Run one async chat completion and print the raw response."""
    conversation = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Who won the world series in 2020?"},
    ]
    print(await model.achat(messages=conversation))


asyncio.run(main())
20 changes: 20 additions & 0 deletions examples/ibm/basic.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
import os

from ibm_watsonx_ai import Credentials
from ibm_watsonx_ai.foundation_models import ModelInference
import lunary

# Watsonx.ai chat model; API key and project id come from the environment.
credentials = Credentials(
    api_key=os.environ.get("IBM_API_KEY"),
    url="https://us-south.ml.cloud.ibm.com",
)
model = ModelInference(
    model_id="meta-llama/llama-3-1-8b-instruct",
    credentials=credentials,
    project_id=os.environ.get("IBM_PROJECT_ID"),
)
lunary.monitor(model)  # attach Lunary monitoring to the model

conversation = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Who won the world series in 2020?"},
]
# Extra metadata can be passed through, e.g.:
# response = model.chat(messages=conversation, tags=["baseball"], user_id="1234", user_props={"name": "Alice"})
response = model.chat(messages=conversation)
22 changes: 22 additions & 0 deletions examples/ibm/stream.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
import os

from ibm_watsonx_ai import Credentials
from ibm_watsonx_ai.foundation_models import ModelInference
import lunary

# Watsonx.ai chat model; API key and project id come from the environment.
credentials = Credentials(
    api_key=os.environ.get("IBM_API_KEY"),
    url="https://us-south.ml.cloud.ibm.com",
)
model = ModelInference(
    model_id="meta-llama/llama-3-1-8b-instruct",
    credentials=credentials,
    project_id=os.environ.get("IBM_PROJECT_ID"),
)
lunary.monitor(model)  # attach Lunary monitoring to the model

conversation = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Who won the world series in 2020?"},
]
stream = model.chat_stream(messages=conversation)

# Drain the stream; this example does nothing with the individual chunks.
for chunk in stream:
    pass
36 changes: 36 additions & 0 deletions examples/ibm/tools.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
import os

from ibm_watsonx_ai import Credentials
from ibm_watsonx_ai.foundation_models import ModelInference
import lunary

# Watsonx.ai chat model; API key and project id come from the environment.
credentials = Credentials(
    api_key=os.environ.get("IBM_API_KEY"),
    url="https://us-south.ml.cloud.ibm.com",
)
model = ModelInference(
    model_id="meta-llama/llama-3-1-8b-instruct",
    credentials=credentials,
    project_id=os.environ.get("IBM_PROJECT_ID"),
)
lunary.monitor(model)  # attach Lunary monitoring to the model

# A single function tool the model may call to look up the weather.
weather_tool = {
    "type": "function",
    "function": {
        "name": "get_weather",
        "parameters": {
            "type": "object",
            "properties": {"location": {"type": "string"}},
        },
    },
}

conversation = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "What's the weather like in Paris today?"},
]
response = model.chat(messages=conversation, tools=[weather_tool])
Loading
Loading