
Commit 738f668

mypy for vllm/utils
Signed-off-by: wwl2755 <wangwenlong2755@gmail.com>
1 parent 5799b37 commit 738f668

File tree

tools/pre_commit/mypy.py
vllm/engine/arg_utils.py
vllm/multimodal/parse.py
vllm/utils/__init__.py
vllm/utils/jsontree.py

5 files changed: 27 additions, 19 deletions

tools/pre_commit/mypy.py

Lines changed: 1 addition & 0 deletions
@@ -36,6 +36,7 @@
     "vllm/transformers_utils",
     "vllm/triton_utils",
     "vllm/usage",
+    "vllm/utils",
 ]
 
 # After fixing errors resulting from changing follow_imports
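
For context, the entry added here only extends the list of directories the pre-commit hook feeds to mypy, so vllm/utils is now type-checked like the others. A rough, hypothetical sketch of that kind of wrapper (not the actual tools/pre_commit/mypy.py, which has its own options and follow_imports handling):

import subprocess
import sys

# Directories that get type-checked; "vllm/utils" is the new entry.
TARGETS = [
    "vllm/transformers_utils",
    "vllm/triton_utils",
    "vllm/usage",
    "vllm/utils",
]


def main() -> int:
    # One mypy invocation over all targets; a non-zero exit code fails the hook.
    return subprocess.run([sys.executable, "-m", "mypy", *TARGETS]).returncode


if __name__ == "__main__":
    sys.exit(main())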

vllm/engine/arg_utils.py

Lines changed: 3 additions & 1 deletion
@@ -1240,10 +1240,12 @@ def create_engine_config(
         self.model = model_config.model
         self.tokenizer = model_config.tokenizer
 
+        # After ModelConfig init, tokenizer must be resolved (never None).
+        assert self.tokenizer is not None
         (self.model, self.tokenizer, self.speculative_config) = (
             maybe_override_with_speculators(
                 model=self.model,
-                tokenizer=self.tokenizer if self.tokenizer is not None else self.model,
+                tokenizer=self.tokenizer,
                 revision=self.revision,
                 trust_remote_code=self.trust_remote_code,
                 vllm_speculative_config=self.speculative_config,
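
The new assert is the usual way to narrow an Optional for mypy: after `assert self.tokenizer is not None` the attribute is treated as str, so the old `... if self.tokenizer is not None else self.model` fallback, which mypy could not see as redundant, can go away. A minimal standalone illustration using hypothetical names (Args, override) rather than the real classes:

from typing import Optional


def override(model: str, tokenizer: str) -> tuple[str, str]:
    # Stand-in for maybe_override_with_speculators(), which expects a str tokenizer.
    return model, tokenizer


class Args:
    def __init__(self, model: str, tokenizer: Optional[str]) -> None:
        self.model = model
        self.tokenizer = tokenizer

    def resolve(self) -> None:
        # Without this assert, mypy reports an arg-type error: Optional[str]
        # cannot be passed where str is expected.
        assert self.tokenizer is not None
        self.model, self.tokenizer = override(self.model, self.tokenizer)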

vllm/multimodal/parse.py

Lines changed: 4 additions & 4 deletions
@@ -13,7 +13,6 @@
     Optional,
     TypeVar,
     Union,
-    cast,
 )
 
 import numpy as np
@@ -367,8 +366,7 @@ def _is_embeddings(
         if isinstance(data, torch.Tensor):
             return data.ndim == 3
         if is_list_of(data, torch.Tensor):
-            tensors = cast(list[torch.Tensor], data)
-            return tensors[0].ndim == 2
+            return data[0].ndim == 2
 
         return False
 
@@ -426,6 +424,8 @@ def _parse_audio_data(
         if self._is_embeddings(data):
             return AudioEmbeddingItems(data)
 
+        # Normalize into a list of audio items
+        data_items: list[AudioItem]
         if (
             is_list_of(data, float)
             or isinstance(data, (np.ndarray, torch.Tensor))
@@ -436,7 +436,7 @@ def _parse_audio_data(
         elif isinstance(data, (np.ndarray, torch.Tensor)):
             data_items = [elem for elem in data]
         else:
-            data_items = data  # type: ignore[assignment]
+            data_items = data
 
         new_audios = list[np.ndarray]()
         for data_item in data_items:
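
Dropping the cast() relies on is_list_of acting as a type guard, so mypy already narrows data to a list of tensors inside that branch. A minimal sketch of the narrowing pattern with typing.TypeGuard; is_list_of_tensors is a hypothetical stand-in, not vllm's actual is_list_of:

from typing import TypeGuard

import torch


def is_list_of_tensors(items: object) -> TypeGuard[list[torch.Tensor]]:
    # Narrows `items` to list[torch.Tensor] in the branch where this returns True.
    return isinstance(items, list) and all(
        isinstance(x, torch.Tensor) for x in items
    )


def is_embeddings(data: object) -> bool:
    if isinstance(data, torch.Tensor):
        return data.ndim == 3
    if is_list_of_tensors(data):
        # `data` is already list[torch.Tensor] here, so indexing needs no cast().
        return data[0].ndim == 2
    return False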

vllm/utils/__init__.py

Lines changed: 13 additions & 8 deletions
@@ -81,7 +81,7 @@
 import setproctitle
 import torch
 import torch.types
-import yaml
+import yaml  # type: ignore[import-untyped]
 import zmq
 import zmq.asyncio
 from packaging import version
@@ -486,7 +486,10 @@ async def merge_async_iterators(
 
     loop = asyncio.get_running_loop()
 
-    awaits = {loop.create_task(anext(it)): (i, it) for i, it in enumerate(iterators)}
+    awaits: dict[asyncio.Task[T], tuple[int, AsyncGenerator[T, None]]] = {
+        loop.create_task(anext(it)): (i, it)  # type: ignore[arg-type]
+        for i, it in enumerate(iterators)
+    }
     try:
         while awaits:
             done, _ = await asyncio.wait(awaits.keys(), return_when=FIRST_COMPLETED)
@@ -495,7 +498,7 @@ async def merge_async_iterators(
                 try:
                     item = await d
                     i, it = pair
-                    awaits[loop.create_task(anext(it))] = pair
+                    awaits[loop.create_task(anext(it))] = pair  # type: ignore[arg-type]
                     yield i, item
                 except StopAsyncIteration:
                     pass
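
The explicit annotation spells out the task-to-(index, iterator) mapping that the comprehension builds, and the ignores presumably cover anext() being typed as returning an Awaitable rather than the Coroutine that create_task expects. A simplified, self-contained sketch of the same merge pattern (assumed behavior, not vllm's exact implementation):

import asyncio
from collections.abc import AsyncGenerator, AsyncIterator


async def merge(
    *iterators: AsyncGenerator[int, None],
) -> AsyncIterator[tuple[int, int]]:
    """Yield (source index, item) pairs as soon as any iterator produces one."""
    loop = asyncio.get_running_loop()
    # Map each pending "fetch next item" task back to (index, iterator).
    awaits = {
        loop.create_task(anext(it)): (i, it)  # type: ignore[arg-type]
        for i, it in enumerate(iterators)
    }
    while awaits:
        done, _ = await asyncio.wait(awaits.keys(), return_when=asyncio.FIRST_COMPLETED)
        for task in done:
            i, it = awaits.pop(task)
            try:
                item = task.result()
            except StopAsyncIteration:
                continue  # this iterator is exhausted
            # Reschedule the next fetch for the iterator that just yielded.
            awaits[loop.create_task(anext(it))] = (i, it)  # type: ignore[arg-type]
            yield i, item


async def _demo() -> None:
    async def gen(start: int) -> AsyncGenerator[int, None]:
        for x in range(start, start + 3):
            yield x

    async for idx, value in merge(gen(0), gen(10)):
        print(idx, value)


if __name__ == "__main__":
    asyncio.run(_demo())
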
@@ -1163,11 +1166,13 @@ def find_nccl_include_paths() -> list[str] | None:
         import importlib.util
 
         spec = importlib.util.find_spec("nvidia.nccl")
-        if spec and getattr(spec, "submodule_search_locations", None):
-            for loc in spec.submodule_search_locations:
-                inc_dir = os.path.join(loc, "include")
-                if os.path.exists(os.path.join(inc_dir, "nccl.h")):
-                    paths.append(inc_dir)
+        if spec:
+            locations = getattr(spec, "submodule_search_locations", None)
+            if locations:
+                for loc in locations:
+                    inc_dir = os.path.join(loc, "include")
+                    if os.path.exists(os.path.join(inc_dir, "nccl.h")):
+                        paths.append(inc_dir)
     except Exception:
         pass
 
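The restructuring here looks like a narrowing fix: submodule_search_locations may be None in the stubs, and `if spec and getattr(...)` tells mypy nothing about the attribute that is iterated afterwards, whereas binding the getattr() result to a local and testing that local does. A small standalone sketch of the pattern, with a hypothetical helper name:

from importlib.machinery import ModuleSpec
from typing import Optional


def candidate_include_dirs(spec: Optional[ModuleSpec]) -> list[str]:
    # Bind the possibly-None attribute to a local and test that local, so the
    # type checker knows it is non-None inside the loop.
    dirs: list[str] = []
    if spec:
        locations = getattr(spec, "submodule_search_locations", None)
        if locations:
            for loc in locations:
                dirs.append(str(loc))
    return dirs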

vllm/utils/jsontree.py

Lines changed: 6 additions & 6 deletions
@@ -4,7 +4,7 @@
 
 from collections.abc import Iterable
 from functools import reduce
-from typing import TYPE_CHECKING, Callable, TypeVar, Union, cast, overload
+from typing import TYPE_CHECKING, Any, Callable, TypeVar, Union, overload
 
 if TYPE_CHECKING:
     import torch
@@ -94,7 +94,7 @@ def json_map_leaves(
             for k, v in value.items()
         }
     elif isinstance(value, list):
-        return [json_map_leaves(func, v) for v in value]
+        return [json_map_leaves(func, v) for v in value]  # type: ignore[return-value]
     elif isinstance(value, tuple):
         return tuple(json_map_leaves(func, v) for v in value)
     else:
@@ -143,11 +143,11 @@ def json_reduce_leaves(
 
 
 def json_reduce_leaves(
-    func: Callable[..., Union[_T, _U]],
-    value: _JSONTree[_T],
-    initial: _U = cast(_U, ...),  # noqa: B008
+    func: Callable[..., Any],
+    value: _JSONTree[Any],
+    initial: Any = ...,  # noqa: B008
     /,
-) -> Union[_T, _U]:
+) -> Any:
     """
     Apply a function of two arguments cumulatively to each leaf in a
     nested JSON structure, from left to right, so as to reduce the
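
Loosening the non-overloaded implementation signature to Any is the standard way to keep precise @overload declarations while letting a single implementation body type-check without cast(); callers still see the specific signatures from the overloads. A reduced sketch of that pattern with a hypothetical reduce_leaves over flat lists, not the real _JSONTree machinery:

from functools import reduce
from typing import Any, Callable, TypeVar, overload

_T = TypeVar("_T")
_U = TypeVar("_U")


@overload
def reduce_leaves(func: Callable[[_T, _T], _T], leaves: list[_T], /) -> _T: ...


@overload
def reduce_leaves(
    func: Callable[[_U, _T], _U], leaves: list[_T], initial: _U, /
) -> _U: ...


def reduce_leaves(
    func: Callable[..., Any],
    leaves: list[Any],
    initial: Any = ...,
    /,
) -> Any:
    # Precise types live in the overloads above; the implementation is typed
    # with Any so one body can serve both call shapes, with Ellipsis as the
    # "no initial value" sentinel.
    if initial is ...:
        return reduce(func, leaves)
    return reduce(func, leaves, initial)

With this shape, a call such as reduce_leaves(lambda a, b: a + b, [1, 2, 3]) is still inferred through the first overload, while the Any-typed implementation no longer needs cast().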
