diff --git a/src/llmperf/models.py b/src/llmperf/models.py
index be0d7ea..0c2e1b5 100644
--- a/src/llmperf/models.py
+++ b/src/llmperf/models.py
@@ -1,4 +1,4 @@
-from typing import Any, Dict, List, Optional, Tuple
+from typing import Any, Dict, Optional, Tuple
 
 from pydantic import BaseModel
 
diff --git a/src/llmperf/ray_clients/openai_chat_completions_client.py b/src/llmperf/ray_clients/openai_chat_completions_client.py
index f2e0a91..2ee9931 100644
--- a/src/llmperf/ray_clients/openai_chat_completions_client.py
+++ b/src/llmperf/ray_clients/openai_chat_completions_client.py
@@ -5,6 +5,7 @@
 
 import ray
 import requests
+from yarl import URL
 
 from llmperf.ray_llm_client import LLMClient
 from llmperf.models import RequestConfig
@@ -54,14 +55,9 @@ def llm_request(self, request_config: RequestConfig) -> Dict[str, Any]:
         if not key:
             raise ValueError("the environment variable OPENAI_API_KEY must be set.")
         headers = {"Authorization": f"Bearer {key}"}
-        if not address:
-            raise ValueError("No host provided.")
-        if not address.endswith("/"):
-            address = address + "/"
-        address += "chat/completions"
         try:
             with requests.post(
-                address,
+                URL(address).with_path("/chat/completions"),
                 json=body,
                 stream=True,
                 timeout=180,
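
For reviewers, a minimal sketch of the `yarl` behavior the new call relies on (the base addresses below are made up for illustration): `URL.with_path()` replaces the entire path of the URL, so any existing path segment on the configured address is swapped out rather than appended to.

```python
# Sketch only: illustrates yarl's URL.with_path() with hypothetical base addresses.
from yarl import URL

for address in (
    "https://api.openai.com/v1/",  # trailing slash
    "https://api.openai.com/v1",   # no trailing slash
    "https://api.openai.com",      # bare host, no path
):
    # with_path() replaces the whole path component (and drops any
    # query/fragment), so all three resolve to the same endpoint.
    print(URL(address).with_path("/chat/completions"))
    # -> https://api.openai.com/chat/completions
```

Passing the `URL` object straight to `requests.post()` should work as written, since `requests` coerces non-string URLs with `str()` when preparing the request.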