Skip to content

Commit

Permalink
Fix style issues reported by clang-tidy (#1167)
Browse files Browse the repository at this point in the history
  • Loading branch information
csukuangfj authored Jul 23, 2024
1 parent d32a461 commit 299f1a8
Show file tree
Hide file tree
Showing 7 changed files with 21 additions and 22 deletions.
4 changes: 2 additions & 2 deletions sherpa-onnx/csrc/jieba-lexicon.cc
Original file line number Diff line number Diff line change
Expand Up @@ -102,13 +102,13 @@ class JiebaLexicon::Impl {
this_sentence.push_back(blank);

if (w == "。" || w == "！" || w == "？" || w == "，") {
ans.push_back(std::move(this_sentence));
ans.emplace_back(std::move(this_sentence));
this_sentence = {};
}
} // for (const auto &w : words)

if (!this_sentence.empty()) {
ans.push_back(std::move(this_sentence));
ans.emplace_back(std::move(this_sentence));
}

return ans;
Expand Down
8 changes: 4 additions & 4 deletions sherpa-onnx/csrc/lexicon.cc
Original file line number Diff line number Diff line change
Expand Up @@ -253,7 +253,7 @@ std::vector<TokenIDs> Lexicon::ConvertTextToTokenIdsChinese(
if (eos != -1) {
this_sentence.push_back(eos);
}
ans.push_back(std::move(this_sentence));
ans.emplace_back(std::move(this_sentence));
this_sentence = {};

if (sil != -1) {
Expand Down Expand Up @@ -283,7 +283,7 @@ std::vector<TokenIDs> Lexicon::ConvertTextToTokenIdsChinese(
if (eos != -1) {
this_sentence.push_back(eos);
}
ans.push_back(std::move(this_sentence));
ans.emplace_back(std::move(this_sentence));

return ans;
}
Expand Down Expand Up @@ -324,7 +324,7 @@ std::vector<TokenIDs> Lexicon::ConvertTextToTokenIdsNotChinese(

if (w != ",") {
this_sentence.push_back(blank);
ans.push_back(std::move(this_sentence));
ans.emplace_back(std::move(this_sentence));
this_sentence = {};
}

Expand All @@ -348,7 +348,7 @@ std::vector<TokenIDs> Lexicon::ConvertTextToTokenIdsNotChinese(
}

if (!this_sentence.empty()) {
ans.push_back(std::move(this_sentence));
ans.emplace_back(std::move(this_sentence));
}

return ans;
Expand Down
1 change: 0 additions & 1 deletion sherpa-onnx/csrc/melo-tts-lexicon.cc
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,6 @@ class MeloTtsLexicon::Impl {
std::vector<TokenIDs> ans;
TokenIDs this_sentence;

int32_t blank = token2id_.at("_");
for (const auto &w : words) {
auto ids = ConvertWordToIds(w);
if (ids.tokens.empty()) {
Expand Down
8 changes: 4 additions & 4 deletions sherpa-onnx/csrc/offline-tts-character-frontend.cc
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,7 @@ std::vector<TokenIDs> OfflineTtsCharacterFrontend::ConvertTextToTokenIds(
this_sentence.push_back(eos_id);
}

ans.push_back(std::move(this_sentence));
ans.emplace_back(std::move(this_sentence));
this_sentence = {};

// re-initialize this_sentence
Expand All @@ -152,7 +152,7 @@ std::vector<TokenIDs> OfflineTtsCharacterFrontend::ConvertTextToTokenIds(
}

if (static_cast<int32_t>(this_sentence.size()) > 1 + use_eos_bos) {
ans.push_back(std::move(this_sentence));
ans.emplace_back(std::move(this_sentence));
}
} else {
// not adding blank
Expand All @@ -171,7 +171,7 @@ std::vector<TokenIDs> OfflineTtsCharacterFrontend::ConvertTextToTokenIds(
this_sentence.push_back(eos_id);
}

ans.push_back(std::move(this_sentence));
ans.emplace_back(std::move(this_sentence));
this_sentence = {};

// re-initialize this_sentence
Expand All @@ -182,7 +182,7 @@ std::vector<TokenIDs> OfflineTtsCharacterFrontend::ConvertTextToTokenIds(
}

if (this_sentence.size() > 1) {
ans.push_back(std::move(this_sentence));
ans.emplace_back(std::move(this_sentence));
}
}

Expand Down
10 changes: 5 additions & 5 deletions sherpa-onnx/csrc/offline-tts-frontend.h
Original file line number Diff line number Diff line change
Expand Up @@ -15,12 +15,12 @@ namespace sherpa_onnx {
struct TokenIDs {
TokenIDs() = default;

/*implicit*/ TokenIDs(const std::vector<int64_t> &tokens) // NOLINT
: tokens{tokens} {}
/*implicit*/ TokenIDs(std::vector<int64_t> tokens) // NOLINT
: tokens{std::move(tokens)} {}

TokenIDs(const std::vector<int64_t> &tokens,
const std::vector<int64_t> &tones)
: tokens{tokens}, tones{tones} {}
TokenIDs(std::vector<int64_t> tokens, // NOLINT
std::vector<int64_t> tones) // NOLINT
: tokens{std::move(tokens)}, tones{std::move(tones)} {}

std::string ToString() const;

Expand Down
8 changes: 4 additions & 4 deletions sherpa-onnx/csrc/onnx-utils.cc
Original file line number Diff line number Diff line change
Expand Up @@ -157,8 +157,8 @@ Ort::Value View(Ort::Value *v) {

float ComputeSum(const Ort::Value *v, int32_t n /*= -1*/) {
std::vector<int64_t> shape = v->GetTensorTypeAndShapeInfo().GetShape();
auto size = static_cast<int32_t>(std::accumulate(
shape.begin(), shape.end(), 1, std::multiplies<int64_t>()));
auto size = static_cast<int32_t>(
std::accumulate(shape.begin(), shape.end(), 1, std::multiplies<>()));
if (n != -1 && n < size && n > 0) {
size = n;
}
Expand All @@ -170,8 +170,8 @@ float ComputeSum(const Ort::Value *v, int32_t n /*= -1*/) {

float ComputeMean(const Ort::Value *v, int32_t n /*= -1*/) {
std::vector<int64_t> shape = v->GetTensorTypeAndShapeInfo().GetShape();
auto size = static_cast<int32_t>(std::accumulate(
shape.begin(), shape.end(), 1, std::multiplies<int64_t>()));
auto size = static_cast<int32_t>(
std::accumulate(shape.begin(), shape.end(), 1, std::multiplies<>()));

if (n != -1 && n < size && n > 0) {
size = n;
Expand Down
4 changes: 2 additions & 2 deletions sherpa-onnx/csrc/piper-phonemize-lexicon.cc
Original file line number Diff line number Diff line change
Expand Up @@ -239,12 +239,12 @@ std::vector<TokenIDs> PiperPhonemizeLexicon::ConvertTextToTokenIds(
if (meta_data_.is_piper || meta_data_.is_icefall) {
for (const auto &p : phonemes) {
phoneme_ids = PiperPhonemesToIds(token2id_, p);
ans.push_back(std::move(phoneme_ids));
ans.emplace_back(std::move(phoneme_ids));
}
} else if (meta_data_.is_coqui) {
for (const auto &p : phonemes) {
phoneme_ids = CoquiPhonemesToIds(token2id_, p, meta_data_);
ans.push_back(std::move(phoneme_ids));
ans.emplace_back(std::move(phoneme_ids));
}

} else {
Expand Down

0 comments on commit 299f1a8

Please sign in to comment.