Lint pre-commit failures
ots22 committed Jun 13, 2024
1 parent 24aef00 · commit 521d3ad
Showing 3 changed files with 180 additions and 11 deletions.
170 changes: 169 additions & 1 deletion poetry.lock

Some generated files are not rendered by default.

1 change: 0 additions & 1 deletion reginald/llm_reginald/__init__.py
@@ -1,2 +1 @@
 from llm_reginald import *
-
20 changes: 11 additions & 9 deletions reginald/llm_reginald/llm_reginald.py
@@ -1,8 +1,9 @@
 import uuid
+from typing import Optional
+from urllib.parse import urljoin
+
 import httpx
 import llm
-from urllib.parse import urljoin
-from typing import Optional
 from pydantic import Field


@@ -19,13 +20,12 @@ class Options(llm.Options):
         server_url: str = Field(
             default="http://localhost:8000",
             title="Server URL",
-            description="The base URL of the Reginald server"
+            description="The base URL of the Reginald server",
         )

         def direct_message_endpoint(self):
             return urljoin(self.server_url, "direct_message")


     def execute(self, prompt, stream, response, conversation):
-
         message = prompt.prompt
@@ -37,7 +37,7 @@ def execute(self, prompt, stream, response, conversation):
         # continue that conversation.

         try:
-            user_id = conversation.responses[0].response_json['user_id']
+            user_id = conversation.responses[0].response_json["user_id"]
         except (TypeError, AttributeError, KeyError, IndexError) as e:
             user_id = str(uuid.uuid4().int)

@@ -46,13 +46,15 @@ def execute(self, prompt, stream, response, conversation):
                 reginald_reply = client.post(
                     prompt.options.direct_message_endpoint(),
                     json={"message": message, "user_id": user_id},
-                    timeout=None
+                    timeout=None,
                 )
                 reginald_reply.raise_for_status()
         except httpx.HTTPError as e:
             # re-raise as an llm.ModelError for llm to report
-            raise llm.ModelError(f"Could not connect to Reginald at {prompt.options.direct_message_endpoint()}.\n\nThe error was:\n {e}.\n\nIs the model server running?")
+            raise llm.ModelError(
+                f"Could not connect to Reginald at {prompt.options.direct_message_endpoint()}.\n\nThe error was:\n {e}.\n\nIs the model server running?"
+            )

-        yield reginald_reply.json()['message']
+        yield reginald_reply.json()["message"]

-        response.response_json = {'user_id': user_id}
+        response.response_json = {"user_id": user_id}
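
Aside (not part of the commit): a minimal, self-contained sketch of the two behaviours the code above relies on — urljoin's path handling when Options.direct_message_endpoint() builds the endpoint URL, and the uuid fallback used when no user_id can be recovered from an earlier response. The recover_user_id helper and the alternative base URLs here are invented for illustration.

    import uuid
    from urllib.parse import urljoin

    # Endpoint construction, as in Options.direct_message_endpoint().
    # urljoin gotcha: a base URL whose path lacks a trailing slash loses
    # its last path segment when a relative reference is joined to it.
    print(urljoin("http://localhost:8000", "direct_message"))       # http://localhost:8000/direct_message
    print(urljoin("http://localhost:8000/api", "direct_message"))   # http://localhost:8000/direct_message
    print(urljoin("http://localhost:8000/api/", "direct_message"))  # http://localhost:8000/api/direct_message

    # user_id recovery, mirroring execute(): reuse the id stored in the
    # first response of the conversation, or mint a fresh one if anything
    # along that attribute chain is missing.
    def recover_user_id(conversation):
        try:
            return conversation.responses[0].response_json["user_id"]
        except (TypeError, AttributeError, KeyError, IndexError):
            return str(uuid.uuid4().int)

    print(recover_user_id(None))  # no conversation yet -> fresh random id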
