Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[example] Fix onchain ai chat web deployment error #3284

Merged
merged 3 commits into from
Feb 9, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
89 changes: 86 additions & 3 deletions examples/onchain_ai_chat/sources/ai_service.move
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,36 @@ module onchain_ai_chat::ai_service {
});
}

/// Escape special characters in a string so it can be embedded in a JSON
/// string literal (RFC 8259): quote, backslash, the named control escapes,
/// and every remaining control character (< 0x20) as a \u00XX sequence.
/// Without the \u00XX fallback, bytes like 0x00-0x07 or 0x0b would pass
/// through raw and produce invalid JSON, breaking the request body.
fun escape_json_string(content: &String): String {
    let result = vector::empty<u8>();
    let bytes = string::bytes(content);
    let i = 0;
    let len = vector::length(bytes);
    while (i < len) {
        let byte = *vector::borrow(bytes, i);
        if (byte == 0x22) { // double quote "
            vector::append(&mut result, b"\\\"");
        } else if (byte == 0x5c) { // backslash \
            vector::append(&mut result, b"\\\\");
        } else if (byte == 0x08) { // backspace
            vector::append(&mut result, b"\\b");
        } else if (byte == 0x0c) { // form feed
            vector::append(&mut result, b"\\f");
        } else if (byte == 0x0a) { // line feed
            vector::append(&mut result, b"\\n");
        } else if (byte == 0x0d) { // carriage return
            vector::append(&mut result, b"\\r");
        } else if (byte == 0x09) { // tab
            vector::append(&mut result, b"\\t");
        } else if (byte < 0x20) {
            // RFC 8259 section 7: all other control characters MUST be
            // escaped; use the generic \u00XX form.
            vector::append(&mut result, b"\\u00");
            vector::push_back(&mut result, hex_digit(byte >> 4));
            vector::push_back(&mut result, hex_digit(byte & 0x0f));
        } else {
            vector::push_back(&mut result, byte);
        };
        i = i + 1;
    };
    string::utf8(result)
}

/// Map a nibble (0-15) to its lowercase ASCII hex digit.
fun hex_digit(nibble: u8): u8 {
    if (nibble < 10) { 0x30 + nibble } // '0'..'9'
    else { 0x57 + nibble } // 'a'..'f' (0x57 + 10 == 0x61 == 'a')
}

fun build_chat_context(content: String, previous_messages: &vector<Message>): String {
//we use a fixed model for now, gpt-4o
let body = string::utf8(b"{\"model\": \"gpt-4o\", \"messages\": [");
Expand All @@ -62,17 +92,18 @@ module onchain_ai_chat::ai_service {
string::utf8(b"user")
});
string::append(&mut body, string::utf8(b"\", \"content\": \""));
string::append(&mut body, message::get_content(msg));
// Escape message content
string::append(&mut body, escape_json_string(&message::get_content(msg)));
string::append(&mut body, string::utf8(b"\"}"));
i = i + 1;
};

// Add current message
// Add current message with escaped content
if (len > 0) {
string::append(&mut body, string::utf8(b","));
};
string::append(&mut body, string::utf8(b"{\"role\": \"user\", \"content\": \""));
string::append(&mut body, content);
string::append(&mut body, escape_json_string(&content));
string::append(&mut body, string::utf8(b"\"}], \"temperature\": 0.7}"));

body
Expand Down Expand Up @@ -142,4 +173,56 @@ module onchain_ai_chat::ai_service {
/// Consume a PendingRequest and return its (room_id, request_id) pair.
public fun unpack_pending_request(request: PendingRequest): (ObjectID, ObjectID) {
    // Destructure the struct wholesale rather than reading fields one by one.
    let PendingRequest { room_id, request_id } = request;
    (room_id, request_id)
}

#[test]
fun test_escape_json_string() {
    // Quote, newline, and tab use their short escape forms.
    let test_str = string::utf8(b"Hello \"world\"\nNew line\tTab");
    let escaped = escape_json_string(&test_str);
    assert!(escaped == string::utf8(b"Hello \\\"world\\\"\\nNew line\\tTab"), 1);

    // Backslash must double, carriage return becomes \r — an un-escaped
    // backslash would corrupt every other escape sequence in the payload.
    let test_str2 = string::utf8(b"a\\b\rc");
    assert!(escape_json_string(&test_str2) == string::utf8(b"a\\\\b\\rc"), 2);

    // Backspace and form feed use their named escapes.
    let test_str3 = string::utf8(b"x\x08y\x0cz");
    assert!(escape_json_string(&test_str3) == string::utf8(b"x\\by\\fz"), 3);

    // A string with no special characters passes through unchanged.
    let plain = string::utf8(b"plain text");
    assert!(escape_json_string(&plain) == plain, 4);
}

#[test]
/// End-to-end check of the JSON request body produced by build_chat_context:
/// empty history, single-message history, mixed user/AI roles, and content
/// requiring JSON escaping. Each expected value is the exact byte-for-byte
/// body sent to the API, so any change to the serialization format must
/// update these literals in lockstep.
fun test_build_chat_context() {
    use std::string;

    // Test with empty previous messages
    {
        let messages = vector::empty<Message>();
        let content = string::utf8(b"Hello AI");
        let context = build_chat_context(content, &messages);
        let expected = string::utf8(b"{\"model\": \"gpt-4o\", \"messages\": [{\"role\": \"user\", \"content\": \"Hello AI\"}], \"temperature\": 0.7}");
        assert!(context == expected, 1);
    };

    // Test with one previous message
    {
        let messages = vector::empty<Message>();
        vector::push_back(&mut messages, message::new_message(0, @0x1, string::utf8(b"Hi"), message::type_user()));
        let content = string::utf8(b"How are you?");
        let context = build_chat_context(content, &messages);
        let expected = string::utf8(b"{\"model\": \"gpt-4o\", \"messages\": [{\"role\": \"user\", \"content\": \"Hi\"},{\"role\": \"user\", \"content\": \"How are you?\"}], \"temperature\": 0.7}");
        assert!(context == expected, 2);
    };

    // Test with conversation including AI response
    // (AI messages must serialize with the "assistant" role, user with "user")
    {
        let messages = vector::empty<Message>();
        vector::push_back(&mut messages, message::new_message(0, @0x1, string::utf8(b"Hi"), message::type_user()));
        vector::push_back(&mut messages, message::new_message(1, @0x2, string::utf8(b"Hello! How can I help?"), message::type_ai()));
        let content = string::utf8(b"What's the weather?");
        let context = build_chat_context(content, &messages);
        let expected = string::utf8(b"{\"model\": \"gpt-4o\", \"messages\": [{\"role\": \"user\", \"content\": \"Hi\"},{\"role\": \"assistant\", \"content\": \"Hello! How can I help?\"},{\"role\": \"user\", \"content\": \"What's the weather?\"}], \"temperature\": 0.7}");
        assert!(context == expected, 3);
    };

    // Test with special characters
    // (quotes in history and a newline in the new message must be escaped)
    {
        let messages = vector::empty<Message>();
        vector::push_back(&mut messages, message::new_message(0, @0x1, string::utf8(b"Hello \"AI\""), message::type_user()));
        let content = string::utf8(b"New\nline");
        let context = build_chat_context(content, &messages);
        let expected = string::utf8(b"{\"model\": \"gpt-4o\", \"messages\": [{\"role\": \"user\", \"content\": \"Hello \\\"AI\\\"\"},{\"role\": \"user\", \"content\": \"New\\nline\"}], \"temperature\": 0.7}");
        assert!(context == expected, 4);
    };
}
}
2 changes: 2 additions & 0 deletions examples/onchain_ai_chat/web/.npmrc
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
auto-install-peers=true
node-version=18
Loading
Loading