
Commit 4e74292

Remove repeated import
Signed-off-by: Yuanyuan Chen <cyyever@outlook.com>
1 parent 48a5565 commit 4e74292

7 files changed, +0 -13 lines changed
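
Note: each removed line below is a function-local import of a name that is presumably already imported or defined at module level in the same file, which is what makes it a "repeated import". A minimal before/after sketch of the pattern (the function names here are illustrative, not taken from the diff):

import numpy as np  # module-level import already provides `np`


def make_rng(seed):
    import numpy as np  # repeated local import, the kind this commit removes

    return np.random.default_rng(seed)


def make_rng_clean(seed):
    # after the cleanup: rely on the module-level import
    return np.random.default_rng(seed)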

src/transformers/data/data_collator.py

Lines changed: 0 additions & 2 deletions
@@ -869,8 +869,6 @@ def get_generator(self, seed):
 
             return tf.random.Generator.from_seed(seed)
         else:
-            import numpy as np
-
             return np.random.default_rng(seed)
 
     def create_rng(self):
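
As a side note, the surviving call builds a seeded NumPy generator; a small self-contained sketch of why seeding matters here (my example, not part of the diff):

import numpy as np

# Two generators created from the same seed yield identical streams,
# which is presumably what keeps the collator's randomness reproducible.
seed = 42
rng_a = np.random.default_rng(seed)
rng_b = np.random.default_rng(seed)
assert (rng_a.integers(0, 100, size=5) == rng_b.integers(0, 100, size=5)).all()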

src/transformers/integrations/integration_utils.py

Lines changed: 0 additions & 2 deletions
@@ -547,8 +547,6 @@ def run_hp_search_sigopt(trainer, n_trials: int, direction: str, **kwargs) -> Be
 
 
 def run_hp_search_wandb(trainer, n_trials: int, direction: str, **kwargs) -> BestRun:
-    from ..integrations import is_wandb_available
-
     if not is_wandb_available():
         raise ImportError("This function needs wandb installed: `pip install wandb`")
     import wandb
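
For context, guards like `is_wandb_available` are commonly built on importlib; a minimal sketch of the guard-then-lazy-import pattern (an assumption about the general technique, not transformers' exact implementation):

import importlib.util


def wandb_is_installed() -> bool:
    # Check that the package can be found without actually importing it.
    return importlib.util.find_spec("wandb") is not None


def run_sweep_sketch():
    if not wandb_is_installed():
        raise ImportError("This function needs wandb installed: `pip install wandb`")
    import wandb  # deferred import: only pay for it once the guard passes

    return wandb.__name__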

src/transformers/integrations/tensor_parallel.py

Lines changed: 0 additions & 2 deletions
@@ -1103,8 +1103,6 @@ def distribute_model(model, distributed_config, device_mesh, tp_size):
             raise ValueError(f"Unsupported tensor parallel style {v}. Supported styles are {ALL_PARALLEL_STYLES}")
     for name, module in model.named_modules():
         if not getattr(module, "_is_hooked", False):
-            from transformers.integrations.tensor_parallel import add_tensor_parallel_hooks_to_module
-
             plan = _get_parameter_tp_plan(parameter_name=name, tp_plan=model_plan, is_weight=False)
             add_tensor_parallel_hooks_to_module(
                 model=model,

src/transformers/modeling_utils.py

Lines changed: 0 additions & 2 deletions
@@ -2332,8 +2332,6 @@ def tp_plan(self, plan: dict[str, str]):
                     flexible_matched = True
                     break
             if not flexible_matched:
-                import warnings
-
                 warnings.warn(
                     f"Layer pattern '{layer_pattern}' does not match any parameters in the model. "
                     f"This rule may not be applied during tensor parallelization."

src/transformers/testing_utils.py

Lines changed: 0 additions & 2 deletions
@@ -2847,8 +2847,6 @@ def wrapper(*args, **kwargs):
         else:
             test = " ".join(os.environ.get("PYTEST_CURRENT_TEST").split(" ")[:-1])
         try:
-            import copy
-
             env = copy.deepcopy(os.environ)
             env["_INSIDE_SUB_PROCESS"] = "1"
             # This prevents the entries in `short test summary info` given by the subprocess being truncated. so the
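
The surrounding block re-runs the current test in a child process with a marker variable set; a minimal sketch of that copy-the-environment pattern, here using a plain dict copy (illustrative, not the exact testing_utils logic):

import os
import subprocess
import sys

# Copy the parent environment and mark the child so code under test can
# detect that it is running inside the spawned subprocess.
env = dict(os.environ)
env["_INSIDE_SUB_PROCESS"] = "1"

result = subprocess.run(
    [sys.executable, "-c", "import os; print(os.environ['_INSIDE_SUB_PROCESS'])"],
    env=env,
    capture_output=True,
    text=True,
    check=True,
)
print(result.stdout.strip())  # prints: 1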

src/transformers/utils/hub.py

Lines changed: 0 additions & 1 deletion
@@ -1086,7 +1086,6 @@ def get_checkpoint_shard_files(
     For the description of each arg, see [`PreTrainedModel.from_pretrained`]. `index_filename` is the full path to the
     index (downloaded and cached if `pretrained_model_name_or_path` is a model ID on the Hub).
     """
-    import json
 
     use_auth_token = deprecated_kwargs.pop("use_auth_token", None)
     if use_auth_token is not None:

src/transformers/utils/metrics.py

Lines changed: 0 additions & 2 deletions
@@ -105,8 +105,6 @@ def decorator(func):
         if not _has_opentelemetry:
             return func
 
-        import functools
-
         @functools.wraps(func)
         def wrapper(*args, **kwargs):
             instance = args[0] if args and (hasattr(func, "__self__") and func.__self__ is not None) else None
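
The decorator body above wraps `func`; `functools.wraps` is what keeps the wrapped function's metadata intact. A small standalone sketch of that mechanism (illustrative names, no OpenTelemetry involved):

import functools


def traced(func):
    @functools.wraps(func)  # copies __name__, __doc__, etc. onto the wrapper
    def wrapper(*args, **kwargs):
        # a real implementation would start a tracing span around the call here
        return func(*args, **kwargs)

    return wrapper


@traced
def collate(batch):
    """Stand-in for an instrumented method."""
    return batch


print(collate.__name__)  # "collate" rather than "wrapper", thanks to functools.wraps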

0 commit comments
