forked from lm-sys/FastChat
test_sentence_similarity.py · 73 lines (55 loc) · 1.97 KB
import json
import os

import numpy as np
import openai
import requests
from scipy.spatial.distance import cosine

openai.api_key = os.getenv("OPENAI_API_KEY")


def get_embedding_from_api(word, model="vicuna-7b-v1.1"):
    # OpenAI "ada" embedding models go through the official client; any other
    # model name is sent to the locally served FastChat embeddings endpoint.
    if "ada" in model:
        try:
            resp = openai.Embedding.create(
                model=model,
                input=word,
            )
            embedding = np.array(resp["data"][0]["embedding"])
            return embedding
        except Exception as e:
            print(f"Error: OpenAI API call failed: {e}")
            return None

    url = "http://localhost:8000/v1/create_embeddings"
    headers = {"Content-Type": "application/json"}
    data = json.dumps({"model": model, "input": word})

    response = requests.post(url, headers=headers, data=data)
    if response.status_code == 200:
        embedding = np.array(response.json()["data"][0]["embedding"])
        return embedding
    else:
        print(f"Error: {response.status_code} - {response.text}")
        return None
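

# --- Added sketch (not part of the original test) ---------------------------
# A minimal offline check of get_embedding_from_api(), assuming the local
# FastChat endpoint returns an OpenAI-style payload of the form
# {"data": [{"embedding": [...]}]}, which is what the function above parses.
# requests.post is stubbed with unittest.mock, so no server is needed; the
# helper name _smoke_test_local_parsing is introduced here for illustration.
def _smoke_test_local_parsing():
    from unittest import mock

    fake = mock.Mock(status_code=200)
    fake.json.return_value = {"data": [{"embedding": [0.0, 1.0, 2.0]}]}
    with mock.patch("requests.post", return_value=fake):
        emb = get_embedding_from_api("hello", model="vicuna-7b-v1.1")
    assert emb.tolist() == [0.0, 1.0, 2.0]


_smoke_test_local_parsing()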


def cosine_similarity(vec1, vec2):
    # scipy's cosine() returns the cosine *distance*, so similarity = 1 - distance.
    return 1 - cosine(vec1, vec2)


def print_cosine_similarity(embeddings, texts):
    # Print the pairwise similarity for every unique pair of input texts.
    for i in range(len(texts)):
        for j in range(i + 1, len(texts)):
            sim = cosine_similarity(embeddings[texts[i]], embeddings[texts[j]])
            print(f"Cosine similarity between '{texts[i]}' and '{texts[j]}': {sim:.2f}")


texts = [
    "The quick brown fox",
    "The quick brown dog",
    "The fast brown fox",
    "A completely different sentence",
]

# Embeddings from the locally served Vicuna-7B model.
embeddings = {}
for text in texts:
    embeddings[text] = get_embedding_from_api(text)
print("Vicuna-7B:")
print_cosine_similarity(embeddings, texts)

# Embeddings from OpenAI's text-similarity-ada-001.
for text in texts:
    embeddings[text] = get_embedding_from_api(text, model="text-similarity-ada-001")
print("text-similarity-ada-001:")
print_cosine_similarity(embeddings, texts)

# Embeddings from OpenAI's text-embedding-ada-002.
for text in texts:
    embeddings[text] = get_embedding_from_api(text, model="text-embedding-ada-002")
print("text-embedding-ada-002:")
print_cosine_similarity(embeddings, texts)
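

# --- Added sketch (not part of the original test) ---------------------------
# Cross-check that cosine_similarity() above matches the explicit formula
# dot(a, b) / (|a| * |b|) on random vectors, with no API calls. The helper
# name _check_cosine_formula is introduced here for illustration only.
# Note that running the Vicuna-7B part of this script assumes a FastChat API
# server is listening on localhost:8000 (for example one started with
# `python3 -m fastchat.serve.openai_api_server`; the exact module name and
# embeddings route depend on the FastChat version).
def _check_cosine_formula(dim=16, seed=0):
    rng = np.random.default_rng(seed)
    a, b = rng.normal(size=dim), rng.normal(size=dim)
    expected = np.dot(a, b) / (np.linalg.norm(a) * np.linalg.norm(b))
    assert abs(cosine_similarity(a, b) - expected) < 1e-8


_check_cosine_formula()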