apply isort
kamo-naoyuki committed May 18, 2022
Commit 4203c9c (1 parent: d0f2eac)
Showing 157 changed files with 1,090 additions and 863 deletions.
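Every hunk below is mechanical import reformatting. The rewritten imports match isort's black profile (vertical hanging indent, trailing commas, 88-column line limit); treat that as an assumption, since the commit itself does not show the repository's isort settings. A minimal sketch of the same transformation through isort's Python API:

# A minimal sketch, assuming isort is run with the black profile
# (vertical hanging indent, trailing commas, 88-column limit);
# the actual configuration is not part of this commit.
import isort

old_style = (
    "from tacotron_cleaner.cleaners import (collapse_whitespace,\n"
    "                                       expand_abbreviations, expand_numbers)\n"
)
# isort.code() reformats the import statements of a source string in memory.
print(isort.code(old_style, profile="black"))
# Expected output (wrapped because the one-line form exceeds 88 columns):
# from tacotron_cleaner.cleaners import (
#     collapse_whitespace,
#     expand_abbreviations,
#     expand_numbers,
# )

Running `isort .` from the repository root with the same profile would presumably produce edits like the ones in this commit.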
13 changes: 9 additions & 4 deletions egs/vcc20/tts1_en_fi/local/clean_text_css10.py
@@ -9,10 +9,15 @@
 import os

 import nltk
-from tacotron_cleaner.cleaners import (collapse_whitespace,
-                                       expand_abbreviations, expand_numbers,
-                                       expand_symbols, lowercase,
-                                       remove_unnecessary_symbols, uppercase)
+from tacotron_cleaner.cleaners import (
+    collapse_whitespace,
+    expand_abbreviations,
+    expand_numbers,
+    expand_symbols,
+    lowercase,
+    remove_unnecessary_symbols,
+    uppercase,
+)

 try:
     # For phoneme conversion, use https://github.com/Kyubyong/g2p.
15 changes: 10 additions & 5 deletions egs/vcc20/vc1_task2/local/clean_text_finnish.py
@@ -7,11 +7,16 @@
 import codecs

 import nltk
-from tacotron_cleaner.cleaners import (collapse_whitespace,
-                                       custom_english_cleaners,
-                                       expand_abbreviations, expand_numbers,
-                                       expand_symbols, lowercase,
-                                       remove_unnecessary_symbols, uppercase)
+from tacotron_cleaner.cleaners import (
+    collapse_whitespace,
+    custom_english_cleaners,
+    expand_abbreviations,
+    expand_numbers,
+    expand_symbols,
+    lowercase,
+    remove_unnecessary_symbols,
+    uppercase,
+)

 E_lang_tag = "en_US"

3 changes: 1 addition & 2 deletions egs2/TEMPLATE/ssl1/pyscripts/dump_km_label.py
@@ -8,8 +8,7 @@
 import numpy as np
 import torch
 import tqdm
-from sklearn_km import (HubertFeatureReader, MfccFeatureReader,
-                        get_path_iterator)
+from sklearn_km import HubertFeatureReader, MfccFeatureReader, get_path_iterator

 logging.basicConfig(
     level=logging.DEBUG,
2 changes: 1 addition & 1 deletion egs2/TEMPLATE/ssl1/pyscripts/feature_loader.py
@@ -7,7 +7,7 @@
 # Paper: https://arxiv.org/pdf/2106.07447.pdf
 # Code in Fairseq: https://github.com/pytorch/fairseq/tree/master/examples/hubert

-"""Extract MFCC & intermediate embedding from the Hubert model for k-means clustering."""
+"""Extract MFCC & intermediate embedding from the Hubert model for k-means clustering"""

 import logging
 import os
3 changes: 1 addition & 2 deletions egs2/aishell4/enh1/local/split_train_dev_by_column.py
@@ -7,8 +7,7 @@
 from collections import defaultdict
 from pathlib import Path

-from split_train_dev import (int_or_float_or_numstr, split_train_dev,
-                             split_train_dev_v2)
+from split_train_dev import int_or_float_or_numstr, split_train_dev, split_train_dev_v2


 def get_parser():
3 changes: 1 addition & 2 deletions egs2/aishell4/enh1/local/split_train_dev_by_prefix.py
@@ -7,8 +7,7 @@
 from collections import defaultdict
 from pathlib import Path

-from split_train_dev import (int_or_float_or_numstr, split_train_dev,
-                             split_train_dev_v2)
+from split_train_dev import int_or_float_or_numstr, split_train_dev, split_train_dev_v2


 def get_parser():
9 changes: 6 additions & 3 deletions egs2/seame/asr1/local/split_lang_trn.py
@@ -4,9 +4,12 @@
 import argparse
 import os

-from preprocess import (extract_mandarin_only, extract_non_mandarin,
-                        insert_space_between_mandarin,
-                        remove_redundant_whitespaces)
+from preprocess import (
+    extract_mandarin_only,
+    extract_non_mandarin,
+    insert_space_between_mandarin,
+    remove_redundant_whitespaces,
+)

 if __name__ == "__main__":
     # Parse arguments
21 changes: 14 additions & 7 deletions espnet/asr/chainer_backend/asr.py
@@ -17,19 +17,27 @@
 # rnnlm
 import espnet.lm.chainer_backend.extlm as extlm_chainer
 import espnet.lm.chainer_backend.lm as lm_chainer
+
 # espnet related
-from espnet.asr.asr_utils import (CompareValueTrigger, adadelta_eps_decay,
-                                  add_results_to_json, chainer_load,
-                                  get_model_conf, restore_snapshot)
+from espnet.asr.asr_utils import (
+    CompareValueTrigger,
+    adadelta_eps_decay,
+    add_results_to_json,
+    chainer_load,
+    get_model_conf,
+    restore_snapshot,
+)
 from espnet.nets.asr_interface import ASRInterface
 from espnet.utils.deterministic_utils import set_deterministic_chainer
 from espnet.utils.dynamic_import import dynamic_import
 from espnet.utils.io_utils import LoadInputsAndTargets
 from espnet.utils.training.batchfy import make_batchset
 from espnet.utils.training.evaluator import BaseEvaluator
 from espnet.utils.training.iterators import (
-    ShufflingEnabler, ToggleableShufflingMultiprocessIterator,
-    ToggleableShufflingSerialIterator)
+    ShufflingEnabler,
+    ToggleableShufflingMultiprocessIterator,
+    ToggleableShufflingSerialIterator,
+)
 from espnet.utils.training.tensorboard_logger import TensorboardLogger
 from espnet.utils.training.train_utils import check_early_stop, set_early_stop

@@ -270,8 +278,7 @@ def train(args):
         trigger=(args.sortagrad if args.sortagrad != -1 else args.epochs, "epoch"),
     )
     if args.opt == "noam":
-        from espnet.nets.chainer_backend.transformer.training import \
-            VaswaniRule
+        from espnet.nets.chainer_backend.transformer.training import VaswaniRule

         trainer.extend(
             VaswaniRule(
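The "noam" hunk above shows the other pattern repeated throughout this commit: a single-name import split with a backslash continuation is collapsed onto one line once it fits the line-length limit. A hedged sketch of that behavior, under the same black-profile assumption:

# Assumed behavior sketch: isort drops the backslash continuation when the
# import fits within the configured line length (88 columns under black).
import isort

continued = (
    "from espnet.nets.chainer_backend.transformer.training import \\\n"
    "    VaswaniRule\n"
)
print(isort.code(continued, profile="black"))
# Expected output:
# from espnet.nets.chainer_backend.transformer.training import VaswaniRule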
29 changes: 19 additions & 10 deletions espnet/asr/pytorch_backend/asr.py
@@ -21,14 +21,24 @@

 import espnet.lm.pytorch_backend.extlm as extlm_pytorch
 import espnet.nets.pytorch_backend.lm.default as lm_pytorch
-from espnet.asr.asr_utils import (CompareValueTrigger, adadelta_eps_decay,
-                                  add_results_to_json, format_mulenc_args,
-                                  get_model_conf, plot_spectrogram,
-                                  restore_snapshot, snapshot_object,
-                                  torch_load, torch_resume, torch_snapshot)
-from espnet.asr.pytorch_backend.asr_init import (freeze_modules,
-                                                 load_trained_model,
-                                                 load_trained_modules)
+from espnet.asr.asr_utils import (
+    CompareValueTrigger,
+    adadelta_eps_decay,
+    add_results_to_json,
+    format_mulenc_args,
+    get_model_conf,
+    plot_spectrogram,
+    restore_snapshot,
+    snapshot_object,
+    torch_load,
+    torch_resume,
+    torch_snapshot,
+)
+from espnet.asr.pytorch_backend.asr_init import (
+    freeze_modules,
+    load_trained_model,
+    load_trained_modules,
+)
 from espnet.nets.asr_interface import ASRInterface
 from espnet.nets.beam_search_transducer import BeamSearchTransducer
 from espnet.nets.pytorch_backend.e2e_asr import pad_list
@@ -500,8 +510,7 @@ def train(args):
     elif args.opt == "adam":
         optimizer = torch.optim.Adam(model_params, weight_decay=args.weight_decay)
     elif args.opt == "noam":
-        from espnet.nets.pytorch_backend.transformer.optimizer import \
-            get_std_opt
+        from espnet.nets.pytorch_backend.transformer.optimizer import get_std_opt

         if "transducer" in mtl_mode:
             if args.noam_adim > 0:
25 changes: 17 additions & 8 deletions espnet/asr/pytorch_backend/asr_mix.py
@@ -13,19 +13,29 @@

 import numpy as np
 import torch
+
 # chainer related
 from chainer import training
 from chainer.training import extensions

 import espnet.lm.pytorch_backend.extlm as extlm_pytorch
 import espnet.nets.pytorch_backend.lm.default as lm_pytorch
 from espnet.asr.asr_mix_utils import add_results_to_json
-from espnet.asr.asr_utils import (CompareValueTrigger, adadelta_eps_decay,
-                                  get_model_conf, restore_snapshot,
-                                  snapshot_object, torch_load, torch_resume,
-                                  torch_snapshot)
-from espnet.asr.pytorch_backend.asr import (CustomEvaluator, CustomUpdater,
-                                            load_trained_model)
+from espnet.asr.asr_utils import (
+    CompareValueTrigger,
+    adadelta_eps_decay,
+    get_model_conf,
+    restore_snapshot,
+    snapshot_object,
+    torch_load,
+    torch_resume,
+    torch_snapshot,
+)
+from espnet.asr.pytorch_backend.asr import (
+    CustomEvaluator,
+    CustomUpdater,
+    load_trained_model,
+)
 from espnet.nets.asr_interface import ASRInterface
 from espnet.nets.pytorch_backend.e2e_asr_mix import pad_list
 from espnet.utils.dataset import ChainerDataLoader, TransformDataset
@@ -217,8 +227,7 @@ def train(args):
     elif args.opt == "adam":
         optimizer = torch.optim.Adam(model.parameters(), weight_decay=args.weight_decay)
     elif args.opt == "noam":
-        from espnet.nets.pytorch_backend.transformer.optimizer import \
-            get_std_opt
+        from espnet.nets.pytorch_backend.transformer.optimizer import get_std_opt

         optimizer = get_std_opt(
             model.parameters(),
3 changes: 1 addition & 2 deletions espnet/asr/pytorch_backend/recog.py
@@ -6,8 +6,7 @@
 import torch
 from packaging.version import parse as V

-from espnet.asr.asr_utils import (add_results_to_json, get_model_conf,
-                                  torch_load)
+from espnet.asr.asr_utils import add_results_to_json, get_model_conf, torch_load
 from espnet.asr.pytorch_backend.asr import load_trained_model
 from espnet.nets.asr_interface import ASRInterface
 from espnet.nets.batch_beam_search import BatchBeamSearch
9 changes: 7 additions & 2 deletions espnet/bin/asr_align.py
@@ -48,9 +48,14 @@

 import configargparse
 import torch
+
 # imports for CTC segmentation
-from ctc_segmentation import (CtcSegmentationParameters, ctc_segmentation,
-                              determine_utterance_segments, prepare_text)
+from ctc_segmentation import (
+    CtcSegmentationParameters,
+    ctc_segmentation,
+    determine_utterance_segments,
+    prepare_text,
+)

 # imports for inference
 from espnet.asr.pytorch_backend.asr_init import load_trained_model
11 changes: 8 additions & 3 deletions espnet/lm/chainer_backend/lm.py
@@ -18,14 +18,19 @@
 import six
 from chainer import link, reporter, training
 from chainer.dataset import convert
+
 # for classifier link
 from chainer.functions.loss import softmax_cross_entropy
 from chainer.training import extensions

 import espnet.nets.chainer_backend.deterministic_embed_id as DL
-from espnet.lm.lm_utils import (MakeSymlinkToBestModel,
-                                ParallelSentenceIterator, compute_perplexity,
-                                count_tokens, read_tokens)
+from espnet.lm.lm_utils import (
+    MakeSymlinkToBestModel,
+    ParallelSentenceIterator,
+    compute_perplexity,
+    count_tokens,
+    read_tokens,
+)
 from espnet.nets.lm_interface import LMInterface
 from espnet.optimizer.factory import dynamic_import_optimizer
 from espnet.scheduler.chainer import ChainerScheduler
18 changes: 13 additions & 5 deletions espnet/lm/pytorch_backend/lm.py
@@ -18,11 +18,19 @@
 from chainer.training import extensions
 from torch.nn.parallel import data_parallel

-from espnet.asr.asr_utils import (snapshot_object, torch_load, torch_resume,
-                                  torch_snapshot)
-from espnet.lm.lm_utils import (MakeSymlinkToBestModel,
-                                ParallelSentenceIterator, count_tokens,
-                                load_dataset, read_tokens)
+from espnet.asr.asr_utils import (
+    snapshot_object,
+    torch_load,
+    torch_resume,
+    torch_snapshot,
+)
+from espnet.lm.lm_utils import (
+    MakeSymlinkToBestModel,
+    ParallelSentenceIterator,
+    count_tokens,
+    load_dataset,
+    read_tokens,
+)
 from espnet.nets.lm_interface import LMInterface, dynamic_import_lm
 from espnet.optimizer.factory import dynamic_import_optimizer
 from espnet.scheduler.pytorch import PyTorchScheduler
25 changes: 17 additions & 8 deletions espnet/mt/pytorch_backend/mt.py
@@ -16,12 +16,22 @@
 from chainer import training
 from chainer.training import extensions

-from espnet.asr.asr_utils import (CompareValueTrigger, adadelta_eps_decay,
-                                  adam_lr_decay, add_results_to_json,
-                                  restore_snapshot, snapshot_object,
-                                  torch_load, torch_resume, torch_snapshot)
-from espnet.asr.pytorch_backend.asr import (CustomEvaluator, CustomUpdater,
-                                            load_trained_model)
+from espnet.asr.asr_utils import (
+    CompareValueTrigger,
+    adadelta_eps_decay,
+    adam_lr_decay,
+    add_results_to_json,
+    restore_snapshot,
+    snapshot_object,
+    torch_load,
+    torch_resume,
+    torch_snapshot,
+)
+from espnet.asr.pytorch_backend.asr import (
+    CustomEvaluator,
+    CustomUpdater,
+    load_trained_model,
+)
 from espnet.nets.mt_interface import MTInterface
 from espnet.nets.pytorch_backend.e2e_asr import pad_list
 from espnet.utils.dataset import ChainerDataLoader, TransformDataset
@@ -154,8 +164,7 @@ def train(args):
             model.parameters(), lr=args.lr, weight_decay=args.weight_decay
         )
     elif args.opt == "noam":
-        from espnet.nets.pytorch_backend.transformer.optimizer import \
-            get_std_opt
+        from espnet.nets.pytorch_backend.transformer.optimizer import get_std_opt

         optimizer = get_std_opt(
             model.parameters(),
3 changes: 1 addition & 2 deletions espnet/nets/beam_search.py
@@ -7,8 +7,7 @@
 import torch

 from espnet.nets.e2e_asr_common import end_detect
-from espnet.nets.scorer_interface import (PartialScorerInterface,
-                                          ScorerInterface)
+from espnet.nets.scorer_interface import PartialScorerInterface, ScorerInterface


 class Hypothesis(NamedTuple):
13 changes: 9 additions & 4 deletions espnet/nets/beam_search_transducer.py
@@ -10,10 +10,15 @@
 from espnet.nets.pytorch_backend.transducer.joint_network import JointNetwork
 from espnet.nets.pytorch_backend.transducer.rnn_decoder import RNNDecoder
 from espnet.nets.pytorch_backend.transducer.utils import (
-    create_lm_batch_states, init_lm_state, is_prefix, recombine_hyps,
-    select_k_expansions, select_lm_state, subtract)
-from espnet.nets.transducer_decoder_interface import (ExtendedHypothesis,
-                                                      Hypothesis)
+    create_lm_batch_states,
+    init_lm_state,
+    is_prefix,
+    recombine_hyps,
+    select_k_expansions,
+    select_lm_state,
+    subtract,
+)
+from espnet.nets.transducer_decoder_interface import ExtendedHypothesis, Hypothesis


 class BeamSearchTransducer:
1 change: 1 addition & 0 deletions espnet/nets/chainer_backend/deterministic_embed_id.py
@@ -1,6 +1,7 @@
 import chainer
 import numpy
 import six
+
 # from chainer.functions.connection import embed_id
 from chainer import cuda, function_node, link, variable
 from chainer.initializers import normal
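The hunk above adds only a blank line; several files in this commit gain such lines because isort also normalizes blank lines around imports and the comments attached to them. A related, better-documented spacing rule is the blank line isort keeps between import sections (standard library vs. third party), sketched here with hypothetical input and assumed default behavior:

# Assumed default isort behavior: imports are sorted within sections, and
# the stdlib and third-party sections are separated by one blank line.
import isort

jumbled = "import torch\nimport os\nimport numpy as np\n"
print(isort.code(jumbled, profile="black"))
# Expected output:
# import os
#
# import numpy as np
# import torch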
3 changes: 1 addition & 2 deletions espnet/nets/chainer_backend/e2e_asr.py
@@ -215,8 +215,7 @@ def custom_updater(iters, optimizer, converter, device=-1, accum_grad=1):
     @staticmethod
     def custom_parallel_updater(iters, optimizer, converter, devices, accum_grad=1):
         """Get custom_parallel_updater of the model."""
-        from espnet.nets.chainer_backend.rnn.training import \
-            CustomParallelUpdater
+        from espnet.nets.chainer_backend.rnn.training import CustomParallelUpdater

         return CustomParallelUpdater(
             iters,