Skip to content

Commit e09fdd1

Browse files
simon-mojeejeelee
authored and committed
cleanup: remove adapter commons (vllm-project#25045)
Signed-off-by: Jee Jee Li <pandaleefree@gmail.com>
Co-authored-by: Jee Jee Li <pandaleefree@gmail.com>
Signed-off-by: xuebwang-amd <xuebwang@amd.com>
1 parent e65bdc9 commit e09fdd1

File tree

11 files changed

+89
-330
lines changed

11 files changed

+89
-330
lines changed

pyproject.toml

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -115,7 +115,6 @@ follow_imports = "silent"
115115
# move the directory here and remove it from tools/mypy.sh
116116
files = [
117117
"vllm/*.py",
118-
"vllm/adapter_commons",
119118
"vllm/assets",
120119
"vllm/entrypoints",
121120
"vllm/core",

vllm/adapter_commons/__init__.py

Whitespace-only changes.

vllm/adapter_commons/layers.py

Lines changed: 0 additions & 16 deletions
This file was deleted.

vllm/adapter_commons/models.py

Lines changed: 0 additions & 106 deletions
This file was deleted.

vllm/adapter_commons/request.py

Lines changed: 0 additions & 26 deletions
This file was deleted.

vllm/adapter_commons/utils.py

Lines changed: 0 additions & 93 deletions
This file was deleted.

vllm/adapter_commons/worker_manager.py

Lines changed: 0 additions & 39 deletions
This file was deleted.

vllm/lora/layers/utils.py

Lines changed: 8 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,17 +1,22 @@
11
# SPDX-License-Identifier: Apache-2.0
22
# SPDX-FileCopyrightText: Copyright contributors to the vLLM project
3+
34
from dataclasses import dataclass
45

56
import torch
67
import torch.nn as nn
78

8-
from vllm.adapter_commons.layers import AdapterMapping
9-
109

1110
@dataclass
class LoRAMapping:
    """Per-batch mapping of tokens and prompts to LoRA adapter slots.

    Attributes:
        index_mapping: one adapter index per token in the batch.
        prompt_mapping: one adapter index per prompt in the batch.
        is_prefill: whether this mapping describes a prefill batch.
    """

    index_mapping: tuple[int, ...]
    prompt_mapping: tuple[int, ...]
    is_prefill: bool = False

    def __post_init__(self):
        # Callers may hand in lists; normalize both mappings to
        # immutable tuples so the mapping cannot be mutated later.
        self.index_mapping = tuple(self.index_mapping)
        self.prompt_mapping = tuple(self.prompt_mapping)
19+
1520

1621
def _get_lora_device(base_layer: nn.Module) -> torch.device:
1722
# code borrowed from https://github.com/fmmoret/vllm/blob/fm-support-lora-on-quantized-models/vllm/lora/layers.py#L34

0 commit comments

Comments
 (0)