Commit 047d37b: Allow sequence of rates
1 parent 91651b0 · commit 047d37b

2 files changed (+6, -7 lines)

src/guidellm/benchmark/scenario.py

Lines changed: 2 additions & 2 deletions
@@ -1,5 +1,5 @@
 from pathlib import Path
-from typing import Any, Dict, Literal, Optional, Union
+from typing import Any, Dict, Literal, Optional, Sequence, Union
 
 from pydantic import Field
 from typing_extensions import Self
@@ -21,7 +21,7 @@ class Scenario(Serializable):
     data: Union[str, Dict[str, Any]] = Field(default_factory=dict) # type: ignore[arg-type]
     data_type: Literal["emulated", "file", "transformers"] = "emulated"
     rate_type: ProfileGenerationMode = "sweep"
-    rate: Optional[float] = None
+    rate: Optional[Union[float, Sequence[float]]] = None
     max_seconds: int = 120
     max_requests: Optional[Union[int, Literal["dataset"]]] = None
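With this change the rate field validates either a single value or a sequence of values. A minimal sketch of both accepted forms, assuming the model imports as guidellm.benchmark.scenario and that any Scenario fields not shown in this hunk also carry defaults:

    from guidellm.benchmark.scenario import Scenario

    # A single rate still validates as before.
    single = Scenario(rate=2.5)

    # New in this commit: a sequence of rates also satisfies
    # Optional[Union[float, Sequence[float]]].
    multi = Scenario(rate=[1.0, 2.5, 5.0])

    print(single.rate)  # 2.5
    print(multi.rate)   # the validated sequence, e.g. [1.0, 2.5, 5.0]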

src/guidellm/main.py

Lines changed: 4 additions & 5 deletions
@@ -1,5 +1,5 @@
 import asyncio
-from typing import IO, Any, Literal, Optional, Union, get_args
+from typing import Literal, Optional, Sequence, TextIO, Union, get_args
 
 import click
 from loguru import logger
@@ -165,14 +165,14 @@
 )
 def generate_benchmark_report_cli(
     target: str,
-    scenario: Optional[Union[IO[Any], str]],
+    scenario: Optional[Union[TextIO, str]],
     backend: Optional[BackendType],
     model: Optional[str],
     data: Optional[str],
     data_type: Optional[Literal["emulated", "file", "transformers"]],
     tokenizer: Optional[str],
     rate_type: Optional[ProfileGenerationMode],
-    rate: Optional[float],
+    rate: Optional[Union[float, Sequence[float]]],
     max_seconds: Optional[int],
     max_requests: Union[Literal["dataset"], int, None],
     output_path: Optional[str],
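The option wiring that feeds this widened rate parameter is not part of the diff. As a purely illustrative sketch, a click option declared with multiple=True (an assumption here, not something this commit shows) would hand the function a tuple of floats, which satisfies Sequence[float]:

    import click

    # Hypothetical, stripped-down option declaration; guidellm's real
    # decorator stack is not shown in this commit.
    @click.command()
    @click.option("--rate", type=float, multiple=True)
    def report(rate):
        # With multiple=True, click collects repeated flags into a tuple,
        # e.g. `--rate 1.0 --rate 2.5` -> (1.0, 2.5); no flag -> ().
        click.echo(f"rates: {rate}")

    if __name__ == "__main__":
        report()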
@@ -184,9 +184,8 @@ def generate_benchmark_report_cli(
 
     if isinstance(scenario, str):
        defaults = SCENARIOS[scenario]
-    elif isinstance(scenario, IO):
+    elif isinstance(scenario, TextIO):
        defaults = Scenario.from_json(scenario.read())
-        SCENARIOS["custom"] = defaults
     elif scenario is None:
        defaults = Scenario()
     else:
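The TextIO branch above builds the defaults from a JSON file handle via Scenario.from_json. A minimal sketch of that round trip, assuming from_json accepts a JSON string (as its use on scenario.read() suggests) and that unspecified Scenario fields have defaults; the file name is made up for illustration:

    import json
    from guidellm.benchmark.scenario import Scenario

    # Hypothetical scenario file; keys mirror the Scenario fields shown above.
    with open("scenario.json", "w") as handle:
        json.dump({"rate": [1.0, 2.0, 4.0], "max_seconds": 60}, handle)

    # An open text file is what the TextIO branch receives and reads.
    with open("scenario.json") as handle:
        defaults = Scenario.from_json(handle.read())

    print(defaults.rate)         # the sequence from the file, e.g. [1.0, 2.0, 4.0]
    print(defaults.max_seconds)  # 60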

0 commit comments
