Change order of imports
Signed-off-by: Fred Reiss <[email protected]>
frreiss committed Jan 10, 2025
1 parent 94d4e06 commit c20ab65
Showing 1 changed file with 3 additions and 3 deletions.
vllm/entrypoints/openai/api_server.py (3 additions, 3 deletions)
@@ -13,7 +13,7 @@
 from contextlib import asynccontextmanager
 from functools import partial
 from http import HTTPStatus
-from typing import AsyncIterator, Optional, Set, Tuple, Dict, Union
+from typing import AsyncIterator, Dict, Optional, Set, Tuple, Union
 
 import uvloop
 from fastapi import APIRouter, FastAPI, HTTPException, Request
@@ -419,7 +419,7 @@ async def create_embedding(request: EmbeddingRequest, raw_request: Request):
             "use the Pooling API (`/pooling`) instead.")
 
         res = await fallback_handler.create_pooling(request, raw_request)
-
+
         generator: Union[ErrorResponse, EmbeddingResponse]
         if isinstance(res, PoolingResponse):
            generator = EmbeddingResponse(
@@ -495,7 +495,7 @@ async def create_score_v1(request: ScoreRequest, raw_request: Request):
     return await create_score(request, raw_request)
 
 
-TASK_HANDLERS: Dict[str, Dict[str,tuple]] = {
+TASK_HANDLERS: Dict[str, Dict[str, tuple]] = {
     "generate": {
        "messages": (ChatCompletionRequest, create_chat_completion),
        "default": (CompletionRequest, create_completion),
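Note: the substantive change here is purely stylistic. The names in the `typing` import are reordered alphabetically, which is the order an import sorter such as isort would produce by default; the commit itself does not name any tool, so isort is only an assumption used for illustration. A minimal sketch of how the reordering can be reproduced, assuming isort is installed:

# Illustrative only: isort is an assumed tool, not referenced by this commit.
import isort

before = "from typing import AsyncIterator, Optional, Set, Tuple, Dict, Union\n"
print(isort.code(before))
# Expected output: from typing import AsyncIterator, Dict, Optional, Set, Tuple, Union

The other two changed lines are whitespace-only fixes (a blank line inside create_embedding and a missing space in the TASK_HANDLERS type annotation), so the module's runtime behavior is unchanged.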
