# 1-pgvector.py
# Generate pgvector embeddings for verses stored in PostgreSQL and
# benchmark cosine-similarity search over them.
import sqlite3
import struct
import time
import psycopg2
from pgvector.psycopg2 import register_vector
from sentence_transformers import SentenceTransformer
# Connection settings for the PostgreSQL instance holding the vector store.
conn_params = dict(
    dbname="store",
    user="postgres",
    password="postgres",
    host="localhost",
    port="5430",
)

# Open the connection, grab a cursor, and scope lookups to the "store" schema.
postgres = psycopg2.connect(**conn_params)
cur = postgres.cursor()
cur.execute("SET search_path TO store, public")
# Teach psycopg2 how to adapt pgvector values on this cursor.
register_vector(cur)

# Multilingual sentence-embedding model used for both indexing and querying.
model = SentenceTransformer("sentence-transformers/paraphrase-multilingual-mpnet-base-v2")
# Function to fetch Bible text from the SQLite database and split it into sentences
def generate_embeddings():
# Initialize variables
batch_size = 1000
offset = 0
try:
while True:
# Query to select text from Chapter with LIMIT and OFFSET
query = f"""
SELECT text, translationId, bookId, chapterNumber, Number
FROM store."ChapterVerse"
WHERE embedding IS NULL AND translationId = 'rus_syn'
ORDER BY chapterNumber, number
LIMIT {batch_size} OFFSET {offset}
"""
# AND translationId = 'rus_syn'
# Execute the query and fetch results
cur.execute(query)
rows = cur.fetchall()
# If no more rows are returned, break the loop
if not rows:
break
for row in rows:
text = row[0]
# print(text)
embeddings = model.encode(text)
cur.execute(
f"""
UPDATE store."ChapterVerse"
SET embedding = %s
WHERE translationId = %s AND bookId = %s AND chapterNumber = %s AND Number = %s
""",
(embeddings, row[1], row[2], row[3], row[4]),
)
# Commit the transaction
postgres.commit()
# Increment the offset for the next batch
offset += batch_size
except Exception as e:
print(f"An error occurred: {e}")
finally:
# Close the cursor and connection
if cur is not None:
cur.close()
# Close the database connection
postgres.close()
def pgvector_search(embedding):
    """Print the 10 verses most cosine-similar to ``embedding``.

    Orders by the raw ``<=>`` cosine distance ascending, which returns the
    same rows in the same order as ``ORDER BY similarity DESC`` (similarity
    is ``1 - distance``) but lets PostgreSQL use an ANN index (ivfflat/hnsw)
    on the embedding column if one exists; ordering by the computed alias
    cannot use such an index.  Uses the module-level cursor ``cur``.
    """
    cur.execute(
        """
        SELECT text, 1 - (embedding <=> %s::store.vector) AS similarity
        FROM store."ChapterVerse"
        WHERE embedding IS NOT NULL
        ORDER BY embedding <=> %s::store.vector
        LIMIT 10;
        """,
        (embedding, embedding),
    )
    for text, similarity in cur.fetchall():
        print(f"Text: {text}; Similarity: {similarity}")
# Back-fill any missing embeddings before benchmarking.
generate_embeddings()

# Encode the benchmark query phrase once, outside the timed region.
embedding = model.encode("воскресил из мертвых")

# Run the same search five times and report the mean wall-clock time.
start_time = time.perf_counter()
for _ in range(5):
    pgvector_search(embedding)
elapsed_time = time.perf_counter() - start_time
print(f"Search time: {elapsed_time/5} sec")