
Commit c9939b3

Remove repeated import (#40937)
* Remove repeated import

Signed-off-by: Yuanyuan Chen <cyyever@outlook.com>

* Fix conflict

Signed-off-by: Yuanyuan Chen <cyyever@outlook.com>

---------

Signed-off-by: Yuanyuan Chen <cyyever@outlook.com>
1 parent 4f36011 commit c9939b3


7 files changed: +1 -13 lines changed
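Every diff below applies the same cleanup: a name that is already available at module scope is imported again inside a function body, and the redundant local import is deleted (in one case, hoisted to the top of the file instead). A minimal, self-contained sketch of the before/after shape, using a hypothetical module rather than the transformers code touched here:

# Hypothetical illustration of the repeated-import pattern; not code from this commit.
import numpy as np  # the module-level import already binds `np` for the whole file


def sample_before(seed):
    # Redundant: `np` is already bound at module scope; this local import
    # just re-binds the same module object inside the function.
    import numpy as np

    return np.random.default_rng(seed).random()


def sample_after(seed):
    # After the cleanup, the function relies on the module-level import.
    return np.random.default_rng(seed).random()


if __name__ == "__main__":
    # Both variants behave identically; only the redundant import differs.
    assert sample_before(0) == sample_after(0)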


src/transformers/data/data_collator.py

Lines changed: 0 additions & 2 deletions
@@ -737,8 +737,6 @@ def get_generator(self, seed):
 
             return torch.Generator().manual_seed(seed)
         else:
-            import numpy as np
-
             return np.random.default_rng(seed)
 
     def create_rng(self):

src/transformers/integrations/integration_utils.py

Lines changed: 0 additions & 2 deletions
@@ -544,8 +544,6 @@ def run_hp_search_sigopt(trainer, n_trials: int, direction: str, **kwargs) -> Be
 
 
 def run_hp_search_wandb(trainer, n_trials: int, direction: str, **kwargs) -> BestRun:
-    from ..integrations import is_wandb_available
-
     if not is_wandb_available():
         raise ImportError("This function needs wandb installed: `pip install wandb`")
     import wandb

src/transformers/integrations/tensor_parallel.py

Lines changed: 0 additions & 2 deletions
@@ -1103,8 +1103,6 @@ def distribute_model(model, distributed_config, device_mesh, tp_size):
             raise ValueError(f"Unsupported tensor parallel style {v}. Supported styles are {ALL_PARALLEL_STYLES}")
     for name, module in model.named_modules():
         if not getattr(module, "_is_hooked", False):
-            from transformers.integrations.tensor_parallel import add_tensor_parallel_hooks_to_module
-
             plan = _get_parameter_tp_plan(parameter_name=name, tp_plan=model_plan, is_weight=False)
             add_tensor_parallel_hooks_to_module(
                 model=model,
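In tensor_parallel.py the removed line imported add_tensor_parallel_hooks_to_module from the very module that distribute_model lives in, so the name is already resolvable from the module's own namespace. A rough single-file sketch of that situation, with hypothetical names standing in for the transformers code:

# Hypothetical single-file stand-in for a module importing from itself; not transformers code.


def add_hooks(module_name: str) -> str:
    # Stand-in for a helper defined earlier in the same module
    # (add_tensor_parallel_hooks_to_module in the real file).
    return f"hooked:{module_name}"


def distribute(names):
    hooked = []
    for name in names:
        # The removed line was the equivalent of
        #     from this_module import add_hooks
        # executed here; the name is already in this module's globals,
        # so the function can call it directly.
        hooked.append(add_hooks(name))
    return hooked


if __name__ == "__main__":
    print(distribute(["embed_tokens", "mlp"]))  # ['hooked:embed_tokens', 'hooked:mlp']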

src/transformers/modeling_utils.py

Lines changed: 0 additions & 2 deletions
@@ -2242,8 +2242,6 @@ def tp_plan(self, plan: dict[str, str]):
                     flexible_matched = True
                     break
             if not flexible_matched:
-                import warnings
-
                 warnings.warn(
                     f"Layer pattern '{layer_pattern}' does not match any parameters in the model. "
                     f"This rule may not be applied during tensor parallelization."

src/transformers/testing_utils.py

Lines changed: 1 addition & 2 deletions
@@ -15,6 +15,7 @@
 import ast
 import collections
 import contextlib
+import copy
 import doctest
 import functools
 import gc
@@ -2752,8 +2753,6 @@ def wrapper(*args, **kwargs):
             else:
                 test = " ".join(os.environ.get("PYTEST_CURRENT_TEST").split(" ")[:-1])
             try:
-                import copy
-
                 env = copy.deepcopy(os.environ)
                 env["_INSIDE_SUB_PROCESS"] = "1"
                 # This prevents the entries in `short test summary info` given by the subprocess being truncated. so the
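testing_utils.py is the one file where the import is hoisted rather than simply dropped: import copy moves into the module-level import block (the +1 line above) so the existing copy.deepcopy(os.environ) call keeps resolving. A hedged sketch of that refactor in isolation, with a hypothetical helper name:

# Hypothetical sketch of hoisting a function-local import to module level,
# mirroring the testing_utils.py change; not the actual transformers code.
import copy
import os


def subprocess_env():
    # Previously the function body began with `import copy`; with the
    # module-level import above, that local import is unnecessary.
    env = copy.deepcopy(os.environ)
    env["_INSIDE_SUB_PROCESS"] = "1"
    return env


if __name__ == "__main__":
    print(subprocess_env()["_INSIDE_SUB_PROCESS"])  # prints: 1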

src/transformers/utils/hub.py

Lines changed: 0 additions & 1 deletion
@@ -1084,7 +1084,6 @@ def get_checkpoint_shard_files(
     For the description of each arg, see [`PreTrainedModel.from_pretrained`]. `index_filename` is the full path to the
     index (downloaded and cached if `pretrained_model_name_or_path` is a model ID on the Hub).
     """
-    import json
 
     use_auth_token = deprecated_kwargs.pop("use_auth_token", None)
     if use_auth_token is not None:

src/transformers/utils/metrics.py

Lines changed: 0 additions & 2 deletions
@@ -105,8 +105,6 @@ def decorator(func):
         if not _has_opentelemetry:
             return func
 
-        import functools
-
         @functools.wraps(func)
        def wrapper(*args, **kwargs):
            instance = args[0] if args and (hasattr(func, "__self__") and func.__self__ is not None) else None
