ol_stream_mesop.py
import mesop as me
import mesop.labs as mel
import ollama
from typing import Iterator

# Set your model here (it must already be pulled, e.g. with `ollama pull llama3`)
model = 'llama3'

@me.page(path="/chat")
def chat():
    """Handle the "/chat" route.

    Renders mesop's built-in chat widget and wires it to `transform`,
    which streams responses from the local Ollama model.
    """
    mel.chat(transform)

def transform(prompt: str, history: list[mel.ChatMessage]) -> Iterator[str]:
    """Stream a response to `prompt` from the Ollama model.

    Args:
        prompt (str): The user's latest input.
        history (list[mel.ChatMessage]): Previous chat turns.

    Yields:
        str: Chunks of the generated response as they arrive.
    """
    # Flatten prior turns into readable "role: content" lines rather than
    # passing the raw repr of the ChatMessage objects to the model.
    context = '\n'.join(f'{m.role}: {m.content}' for m in history)
    stream = ollama.chat(
        model=model,
        messages=[{'role': 'user', 'content': f'{context}\n{prompt}'}],
        stream=True,
    )
    for chunk in stream:
        yield chunk['message']['content']
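

# A hypothetical alternative sketch (not in the original file): pass the chat
# history to Ollama structurally instead of flattening it into one user
# message. This assumes mel.ChatMessage exposes `role` and `content` fields
# and that mesop's "model" role corresponds to Ollama's "assistant" role.
def transform_structured(prompt: str, history: list[mel.ChatMessage]) -> Iterator[str]:
    # Map each prior turn into Ollama's {'role', 'content'} message format.
    messages = [
        {'role': 'assistant' if m.role == 'model' else 'user', 'content': m.content}
        for m in history
    ]
    messages.append({'role': 'user', 'content': prompt})
    stream = ollama.chat(model=model, messages=messages, stream=True)
    for chunk in stream:
        yield chunk['message']['content']


# To try this locally (assuming an Ollama server is running and the model is
# pulled, e.g. `ollama pull llama3`), launch the app with:
#   mesop ol_stream_mesop.py
# and open the /chat route (by default http://localhost:32123/chat).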