Skip to content

Commit

Permalink
lint: format all Python files instead of just source code (vllm-projec…
Browse files Browse the repository at this point in the history
  • Loading branch information
simon-mo authored Jan 23, 2024
1 parent 9b945da commit 1e4277d
Show file tree
Hide file tree
Showing 5 changed files with 10 additions and 18 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/yapf.yml
Original file line number Diff line number Diff line change
Expand Up @@ -28,4 +28,4 @@ jobs:
pip install toml==0.10.2
- name: Running yapf
run: |
yapf --diff --recursive vllm tests
yapf --diff --recursive .
20 changes: 7 additions & 13 deletions benchmarks/benchmark_serving.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,17 +92,9 @@ async def get_request(
await asyncio.sleep(interval)


async def send_request(
backend: str,
model: str,
api_url: str,
prompt: str,
prompt_len: int,
output_len: int,
best_of: int,
use_beam_search: bool,
pbar: tqdm
) -> None:
async def send_request(backend: str, model: str, api_url: str, prompt: str,
prompt_len: int, output_len: int, best_of: int,
use_beam_search: bool, pbar: tqdm) -> None:
request_start_time = time.perf_counter()

headers = {"User-Agent": "Benchmark Client"}
Expand Down Expand Up @@ -155,7 +147,6 @@ async def send_request(
pbar.update(1)



async def benchmark(
backend: str,
model: str,
Expand Down Expand Up @@ -217,7 +208,10 @@ def main(args: argparse.Namespace):
type=str,
default="vllm",
choices=["vllm", "tgi"])
parser.add_argument("--protocol", type=str, default="http", choices=["http", "https"])
parser.add_argument("--protocol",
type=str,
default="http",
choices=["http", "https"])
parser.add_argument("--host", type=str, default="localhost")
parser.add_argument("--port", type=int, default=8000)
parser.add_argument("--endpoint", type=str, default="/generate")
Expand Down
1 change: 0 additions & 1 deletion examples/openai_chatcompletion_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,5 @@
model=model,
)


print("Chat completion results:")
print(chat_completion)
3 changes: 1 addition & 2 deletions examples/openai_completion_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,7 @@
echo=False,
n=2,
stream=stream,
logprobs=3
)
logprobs=3)

print("Completion results:")
if stream:
Expand Down
2 changes: 1 addition & 1 deletion format.sh
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ format_changed() {

# Format all files
format_all() {
yapf --in-place "${YAPF_FLAGS[@]}" "${YAPF_EXCLUDES[@]}" vllm tests
yapf --in-place "${YAPF_FLAGS[@]}" "${YAPF_EXCLUDES[@]}" .
}

## This flag formats individual files. --files *must* be the first command line
Expand Down

0 comments on commit 1e4277d

Please sign in to comment.