
Commit 1e27123

cyyever authored and Rocketknight1 committed
Remove repeated import
Signed-off-by: Yuanyuan Chen <cyyever@outlook.com>
1 parent 4df2529 commit 1e27123

File tree

7 files changed, +0 -13 lines changed

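All seven deletions follow the same pattern: a name that is presumably already imported at the top of the module (the top-of-file imports are not shown in this diff) was being re-imported inside a function or method, and the redundant local import is simply dropped. A minimal sketch of the idea, using hypothetical code rather than the actual transformers sources:

import numpy as np  # imported once at module level


def make_rng(seed):
    # A redundant local `import numpy as np` used to appear in code like this;
    # `np` is already bound in the module namespace, so it is not needed.
    return np.random.default_rng(seed)


print(make_rng(0).integers(0, 10))  # e.g. prints an integer in [0, 10)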

src/transformers/data/data_collator.py

Lines changed: 0 additions & 2 deletions
@@ -716,8 +716,6 @@ def get_generator(self, seed):
 
             return torch.Generator().manual_seed(seed)
         else:
-            import numpy as np
-
             return np.random.default_rng(seed)
 
     def create_rng(self):

src/transformers/integrations/integration_utils.py

Lines changed: 0 additions & 2 deletions
@@ -544,8 +544,6 @@ def run_hp_search_sigopt(trainer, n_trials: int, direction: str, **kwargs) -> Be
 
 
 def run_hp_search_wandb(trainer, n_trials: int, direction: str, **kwargs) -> BestRun:
-    from ..integrations import is_wandb_available
-
     if not is_wandb_available():
         raise ImportError("This function needs wandb installed: `pip install wandb`")
     import wandb
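The removed line imported is_wandb_available from the ..integrations package, which is the same package this file lives in; presumably the helper is already defined (or imported) at module scope in integration_utils.py, though that part of the file is not shown here. A rough sketch of the assumed layout, with simplified signatures:

import importlib.util


def is_wandb_available():
    # Hypothetical stand-in for the module-level helper.
    return importlib.util.find_spec("wandb") is not None


def run_hp_search_wandb(trainer, n_trials, direction, **kwargs):
    # The helper above is already in scope, so no
    # `from ..integrations import is_wandb_available` is needed here.
    if not is_wandb_available():
        raise ImportError("This function needs wandb installed: `pip install wandb`")
    import wandb  # kept local so the module imports even when wandb is absent

    print(f"sketch only: would run {n_trials} trials (direction={direction})")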

src/transformers/integrations/tensor_parallel.py

Lines changed: 0 additions & 2 deletions
@@ -1103,8 +1103,6 @@ def distribute_model(model, distributed_config, device_mesh, tp_size):
             raise ValueError(f"Unsupported tensor parallel style {v}. Supported styles are {ALL_PARALLEL_STYLES}")
     for name, module in model.named_modules():
         if not getattr(module, "_is_hooked", False):
-            from transformers.integrations.tensor_parallel import add_tensor_parallel_hooks_to_module
-
             plan = _get_parameter_tp_plan(parameter_name=name, tp_plan=model_plan, is_weight=False)
             add_tensor_parallel_hooks_to_module(
                 model=model,
src/transformers/modeling_utils.py

Lines changed: 0 additions & 2 deletions
@@ -2242,8 +2242,6 @@ def tp_plan(self, plan: dict[str, str]):
                        flexible_matched = True
                        break
                if not flexible_matched:
-                    import warnings
-
                    warnings.warn(
                        f"Layer pattern '{layer_pattern}' does not match any parameters in the model. "
                        f"This rule may not be applied during tensor parallelization."
src/transformers/testing_utils.py

Lines changed: 0 additions & 2 deletions
@@ -2805,8 +2805,6 @@ def wrapper(*args, **kwargs):
         else:
             test = " ".join(os.environ.get("PYTEST_CURRENT_TEST").split(" ")[:-1])
         try:
-            import copy
-
             env = copy.deepcopy(os.environ)
             env["_INSIDE_SUB_PROCESS"] = "1"
             # This prevents the entries in `short test summary info` given by the subprocess being truncated. so the

src/transformers/utils/hub.py

Lines changed: 0 additions & 1 deletion
@@ -1084,7 +1084,6 @@ def get_checkpoint_shard_files(
     For the description of each arg, see [`PreTrainedModel.from_pretrained`]. `index_filename` is the full path to the
     index (downloaded and cached if `pretrained_model_name_or_path` is a model ID on the Hub).
     """
-    import json
 
     use_auth_token = deprecated_kwargs.pop("use_auth_token", None)
     if use_auth_token is not None:

src/transformers/utils/metrics.py

Lines changed: 0 additions & 2 deletions
@@ -105,8 +105,6 @@ def decorator(func):
         if not _has_opentelemetry:
             return func
 
-        import functools
-
         @functools.wraps(func)
        def wrapper(*args, **kwargs):
            instance = args[0] if args and (hasattr(func, "__self__") and func.__self__ is not None) else None
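The same one-import-is-enough reasoning applies here: functools is presumably imported at the top of metrics.py (not visible in this hunk), and functools.wraps only needs that single module-level import to decorate the wrapper. A minimal sketch with a hypothetical decorator:

import functools  # a single module-level import is all functools.wraps needs


def traced(func):
    # functools.wraps copies metadata (__name__, __doc__, ...) from `func`
    # onto `wrapper`, so the decorated function still introspects correctly.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        print(f"calling {func.__name__}")
        return func(*args, **kwargs)

    return wrapper


@traced
def add(a, b):
    return a + b


print(add(2, 3))  # prints "calling add" then 5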

0 commit comments
