Skip to content

Commit 1ffe53a

Browse files
committed
Address utility and presentation type errors
Signed-off-by: Jared O'Connell <joconnel@redhat.com>
1 parent cb36c6a commit 1ffe53a

File tree

5 files changed

+58
-39
lines changed

5 files changed

+58
-39
lines changed

src/guidellm/presentation/data_models.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -117,13 +117,13 @@ def from_benchmarks(cls, benchmarks: list["GenerativeBenchmark"]):
117117
range(len(successful_requests)), min(5, len(successful_requests))
118118
)
119119
sample_prompts = [
120-
successful_requests[i].request_args.replace("\n", " ").replace('"', "'")
121-
if successful_requests[i].request_args is not None
122-
else ""
120+
req.request_args.replace("\n", " ").replace('"', "'")
121+
if (req := successful_requests[i]).request_args else ""
123122
for i in sample_indices
124123
]
125124
sample_outputs = [
126-
req.output.replace("\n", " ").replace('"', "'") if (req := successful_requests[i]).output else ""
125+
req.output.replace("\n", " ").replace('"', "'")
126+
if (req := successful_requests[i]).output else ""
127127
for i in sample_indices
128128
]
129129

src/guidellm/utils/encoding.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@
3232

3333
HAS_MSGSPEC = True
3434
except ImportError:
35-
MsgspecDecoder = MsgspecEncoder = None
35+
MsgspecDecoder = MsgspecEncoder = None # type: ignore[misc, assignment] # HAS_MSGSPEC will be checked at runtime
3636
HAS_MSGSPEC = False
3737

3838

src/guidellm/utils/imports.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
try:
44
import orjson as json
55
except ImportError:
6-
import json
6+
import json # type: ignore[no-redef] # Done only after a failure.
77

88

99
__all__ = ["json"]

src/guidellm/utils/registry.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -65,7 +65,7 @@ class TokenProposal(RegistryMixin):
6565
:cvar registry_populated: Track whether auto-discovery has completed
6666
"""
6767

68-
registry: ClassVar[dict[str, RegistryObjT] | None] = None
68+
registry: ClassVar[dict[str, RegistryObjT] | None] = None # type: ignore[misc]
6969
registry_auto_discovery: ClassVar[bool] = False
7070
registry_populated: ClassVar[bool] = False
7171

src/guidellm/utils/statistics.py

Lines changed: 51 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -283,40 +283,12 @@ def from_request_times(
283283
)
284284

285285
# First convert to timing events based on type
286-
events: list[tuple[float, float]] = []
287-
288-
if distribution_type == "concurrency":
289-
# For concurrency, each request adds to concurrency at start
290-
# and subtracts at end
291-
for (start, end), weight in zip(requests, weights, strict=False):
292-
events.append((start, weight))
293-
events.append((end, -1 * weight))
294-
elif distribution_type == "rate":
295-
# For rate, each request is added at the end time only
296-
global_start = min(start for start, _ in requests) if requests else 0.0
297-
events.append((global_start, 0.0))
298-
for (_, end), weight in zip(requests, weights, strict=False):
299-
events.append((end, weight))
300-
else:
301-
raise ValueError(
302-
f"Invalid distribution_type '{distribution_type}'. "
303-
"Must be 'concurrency' or 'rate'."
304-
)
305-
306-
# Combine any events within epsilon of each other for stability
307-
sorted_events = sorted(events, key=lambda event: event[0])
308-
flattened_events: list[tuple[float, float]] = (
309-
[sorted_events.pop(0)] if sorted_events else []
286+
events = DistributionSummary._convert_to_timing_events(
287+
requests, distribution_type, weights
310288
)
311-
last_time = flattened_events[0][0] if flattened_events else 0.0
312289

313-
for time, val in sorted_events:
314-
if abs(time - last_time) <= epsilon:
315-
last_val = flattened_events[-1][1]
316-
flattened_events[-1] = (last_time, last_val + val)
317-
else:
318-
last_time = time
319-
flattened_events.append((time, val))
290+
# Combine any events within epsilon of each other for stability
291+
flattened_events = DistributionSummary._combine_events(events, epsilon)
320292

321293
# Convert events to value distribution function
322294
distribution: dict[float, float] = defaultdict(float)
@@ -357,6 +329,53 @@ def from_request_times(
357329
include_cdf=include_cdf,
358330
)
359331

332+
@staticmethod
333+
def _convert_to_timing_events(
334+
requests: list[tuple[float, float]],
335+
distribution_type: Literal["concurrency", "rate"],
336+
weights: list[float],
337+
) -> list[tuple[float, float]]:
338+
events: list[tuple[float, float]] = []
339+
340+
if distribution_type == "concurrency":
341+
# For concurrency, each request adds to concurrency at start
342+
# and subtracts at end
343+
for (start, end), weight in zip(requests, weights, strict=False):
344+
events.append((start, weight))
345+
events.append((end, -1 * weight))
346+
elif distribution_type == "rate":
347+
# For rate, each request is added at the end time only
348+
global_start = min(start for start, _ in requests) if requests else 0.0
349+
events.append((global_start, 0.0))
350+
for (_, end), weight in zip(requests, weights, strict=False):
351+
events.append((end, weight))
352+
else:
353+
raise ValueError(
354+
f"Invalid distribution_type '{distribution_type}'. "
355+
"Must be 'concurrency' or 'rate'."
356+
)
357+
return events
358+
359+
@staticmethod
360+
def _combine_events(
361+
events: list[tuple[float, float]],
362+
epsilon: float,
363+
) -> list[tuple[float, float]]:
364+
sorted_events = sorted(events, key=lambda event: event[0])
365+
flattened_events: list[tuple[float, float]] = (
366+
[sorted_events.pop(0)] if sorted_events else []
367+
)
368+
last_time = flattened_events[0][0] if flattened_events else 0.0
369+
370+
for time, val in sorted_events:
371+
if abs(time - last_time) <= epsilon:
372+
last_val = flattened_events[-1][1]
373+
flattened_events[-1] = (last_time, last_val + val)
374+
else:
375+
last_time = time
376+
flattened_events.append((time, val))
377+
return flattened_events
378+
360379
@staticmethod
361380
def from_iterable_request_times(
362381
requests: list[tuple[float, float]],

0 commit comments

Comments (0)