Skip to content

Commit

Permalink
Back out "printf -> fmt::print in files inc faiss/IndexBinaryHNSW.cpp" (
Browse files Browse the repository at this point in the history
facebookresearch#3164)

Summary:
Pull Request resolved: facebookresearch#3164

The original diff breaks the open-source Faiss build.

Original commit changeset: 230540b26ec8

Original Phabricator Diff: D51486397

Reviewed By: algoriddle

Differential Revision: D51938993

fbshipit-source-id: a57433c4267493d2fe2249e8f4191612c0f1da59
  • Loading branch information
mdouze authored and facebook-github-bot committed Dec 7, 2023
1 parent 131adc5 commit 5b6c4b4
Show file tree
Hide file tree
Showing 10 changed files with 91 additions and 122 deletions.
21 changes: 9 additions & 12 deletions faiss/IndexBinaryHNSW.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,6 @@
#include <faiss/utils/Heap.h>
#include <faiss/utils/hamming.h>
#include <faiss/utils/random.h>
#include <fmt/core.h>

namespace faiss {

Expand All @@ -52,18 +51,17 @@ void hnsw_add_vertices(
size_t ntotal = n0 + n;
double t0 = getmillisecs();
if (verbose) {
fmt::print(
"hnsw_add_vertices: adding {} elements on top of {} "
"(preset_levels={})\n",
n,
n0,
int(preset_levels));
printf("hnsw_add_vertices: adding %zd elements on top of %zd "
"(preset_levels=%d)\n",
n,
n0,
int(preset_levels));
}

int max_level = hnsw.prepare_level_tab(n, preset_levels);

if (verbose) {
fmt::print(" max_level = {}\n", max_level);
printf(" max_level = %d\n", max_level);
}

std::vector<omp_lock_t> locks(ntotal);
Expand Down Expand Up @@ -110,8 +108,7 @@ void hnsw_add_vertices(
int i0 = i1 - hist[pt_level];

if (verbose) {
fmt::print(
"Adding {} elements at level {}\n", i1 - i0, pt_level);
printf("Adding %d elements at level %d\n", i1 - i0, pt_level);
}

// random permutation to get rid of dataset order bias
Expand All @@ -138,7 +135,7 @@ void hnsw_add_vertices(

if (prev_display >= 0 && i - i0 > prev_display + 10000) {
prev_display = i - i0;
fmt::print(" {} / {}\r", i - i0, i1 - i0);
printf(" %d / %d\r", i - i0, i1 - i0);
fflush(stdout);
}
}
Expand All @@ -148,7 +145,7 @@ void hnsw_add_vertices(
FAISS_ASSERT(i1 == 0);
}
if (verbose) {
fmt::print("Done in {:.3f} ms\n", getmillisecs() - t0);
printf("Done in %.3f ms\n", getmillisecs() - t0);
}

for (int i = 0; i < ntotal; i++)
Expand Down
7 changes: 3 additions & 4 deletions faiss/IndexBinaryHash.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@
#include <faiss/impl/AuxIndexStructures.h>
#include <faiss/impl/FaissAssert.h>
#include <faiss/impl/platform_macros.h>
#include <fmt/core.h>

namespace faiss {

Expand Down Expand Up @@ -269,12 +268,12 @@ size_t IndexBinaryHash::hashtable_size() const {

/// Debug helper: print the hash-table inverted lists to stdout,
/// one line per hash key in the form "<key>: [<id> <id> ... ]".
/// NOTE(review): output volume presumably grows with ntotal — avoid on
/// large indexes.
void IndexBinaryHash::display() const {
    for (auto it = invlists.begin(); it != invlists.end(); ++it) {
        // hash key for this bucket (PRId64: idx_t is a 64-bit integer)
        printf("%" PRId64 ": [", it->first);
        // ids of the vectors stored under this key
        const std::vector<idx_t>& v = it->second.ids;
        for (auto x : v) {
            printf("%" PRId64 " ", x);
        }
        printf("]\n");
    }
}

Expand Down
22 changes: 9 additions & 13 deletions faiss/IndexBinaryIVF.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@
#include <faiss/utils/hamming.h>
#include <faiss/utils/sorting.h>
#include <faiss/utils/utils.h>
#include <fmt/core.h>

namespace faiss {

Expand Down Expand Up @@ -88,11 +87,10 @@ void IndexBinaryIVF::add_core(
n_add++;
}
if (verbose) {
fmt::print(
"IndexBinaryIVF::add_with_ids: added "
"%" PRId64 " / %" PRId64 " vectors\n",
n_add,
n);
printf("IndexBinaryIVF::add_with_ids: added "
"%" PRId64 " / %" PRId64 " vectors\n",
n_add,
n);
}
ntotal += n_add;
}
Expand Down Expand Up @@ -235,17 +233,16 @@ size_t IndexBinaryIVF::remove_ids(const IDSelector& sel) {

void IndexBinaryIVF::train(idx_t n, const uint8_t* x) {
if (verbose) {
fmt::print("Training quantizer\n");
printf("Training quantizer\n");
}

if (quantizer->is_trained && (quantizer->ntotal == nlist)) {
if (verbose) {
fmt::print("IVF quantizer does not need training.\n");
printf("IVF quantizer does not need training.\n");
}
} else {
if (verbose) {
fmt::print(
"Training quantizer on %" PRId64 " vectors in {}D\n", n, d);
printf("Training quantizer on %" PRId64 " vectors in %dD\n", n, d);
}

Clustering clus(d, nlist, cp);
Expand All @@ -254,9 +251,8 @@ void IndexBinaryIVF::train(idx_t n, const uint8_t* x) {
IndexFlatL2 index_tmp(d);

if (clustering_index && verbose) {
fmt::print(
"using clustering_index of dimension {} to do the clustering\n",
clustering_index->d);
printf("using clustering_index of dimension %d to do the clustering\n",
clustering_index->d);
}

// LSH codec that is able to convert the binary vectors to floats.
Expand Down
3 changes: 1 addition & 2 deletions faiss/IndexFastScan.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,6 @@
#include <faiss/impl/pq4_fast_scan.h>
#include <faiss/impl/simd_result_handlers.h>
#include <faiss/utils/quantize_lut.h>
#include <fmt/core.h>

namespace faiss {

Expand Down Expand Up @@ -74,7 +73,7 @@ void IndexFastScan::add(idx_t n, const float* x) {
for (idx_t i0 = 0; i0 < n; i0 += bs) {
idx_t i1 = std::min(n, i0 + bs);
if (verbose) {
fmt::print("IndexFastScan::add {}/{}\n", size_t(i1), size_t(n));
printf("IndexFastScan::add %zd/%zd\n", size_t(i1), size_t(n));
}
add(i1 - i0, x + i0 * d);
}
Expand Down
37 changes: 16 additions & 21 deletions faiss/IndexHNSW.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,6 @@
#include <faiss/utils/distances.h>
#include <faiss/utils/random.h>
#include <faiss/utils/sorting.h>
#include <fmt/core.h>

extern "C" {

Expand Down Expand Up @@ -135,12 +134,11 @@ void hnsw_add_vertices(
size_t ntotal = n0 + n;
double t0 = getmillisecs();
if (verbose) {
fmt::print(
"hnsw_add_vertices: adding {} elements on top of {} "
"(preset_levels={})\n",
n,
n0,
int(preset_levels));
printf("hnsw_add_vertices: adding %zd elements on top of %zd "
"(preset_levels=%d)\n",
n,
n0,
int(preset_levels));
}

if (n == 0) {
Expand All @@ -150,7 +148,7 @@ void hnsw_add_vertices(
int max_level = hnsw.prepare_level_tab(n, preset_levels);

if (verbose) {
fmt::print(" max_level = {}\n", max_level);
printf(" max_level = %d\n", max_level);
}

std::vector<omp_lock_t> locks(ntotal);
Expand Down Expand Up @@ -198,8 +196,7 @@ void hnsw_add_vertices(
int i0 = i1 - hist[pt_level];

if (verbose) {
fmt::print(
"Adding {} elements at level {}\n", i1 - i0, pt_level);
printf("Adding %d elements at level %d\n", i1 - i0, pt_level);
}

// random permutation to get rid of dataset order bias
Expand Down Expand Up @@ -235,7 +232,7 @@ void hnsw_add_vertices(

if (prev_display >= 0 && i - i0 > prev_display + 10000) {
prev_display = i - i0;
fmt::print(" {} / {}\r", i - i0, i1 - i0);
printf(" %d / %d\r", i - i0, i1 - i0);
fflush(stdout);
}
if (counter % check_period == 0) {
Expand All @@ -254,7 +251,7 @@ void hnsw_add_vertices(
FAISS_ASSERT(i1 == 0);
}
if (verbose) {
fmt::print("Done in {:.3f} ms\n", getmillisecs() - t0);
printf("Done in %.3f ms\n", getmillisecs() - t0);
}

for (int i = 0; i < ntotal; i++) {
Expand Down Expand Up @@ -541,13 +538,13 @@ void IndexHNSW::init_level_0_from_entry_points(
*dis, pt_id, nearest, (*dis)(nearest), 0, locks.data(), vt);

if (verbose && i % 10000 == 0) {
fmt::print(" {} / {}\r", i, n);
printf(" %d / %d\r", i, n);
fflush(stdout);
}
}
}
if (verbose) {
fmt::print("\n");
printf("\n");
}

for (int i = 0; i < ntotal; i++)
Expand Down Expand Up @@ -589,7 +586,7 @@ void IndexHNSW::reorder_links() {
}

void IndexHNSW::link_singletons() {
fmt::print("search for singletons\n");
printf("search for singletons\n");

std::vector<bool> seen(ntotal);

Expand All @@ -614,12 +611,10 @@ void IndexHNSW::link_singletons() {
}
}

fmt::print(
" Found {} / %" PRId64
" singletons ({} appear in a level above)\n",
n_sing,
ntotal,
n_sing_l1);
printf(" Found %d / %" PRId64 " singletons (%d appear in a level above)\n",
n_sing,
ntotal,
n_sing_l1);

std::vector<float> recons(singletons.size() * d);
for (int i = 0; i < singletons.size(); i++) {
Expand Down
43 changes: 19 additions & 24 deletions faiss/IndexIVF.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,6 @@
#include <faiss/impl/CodePacker.h>
#include <faiss/impl/FaissAssert.h>
#include <faiss/impl/IDSelector.h>
#include <fmt/core.h>

namespace faiss {

Expand Down Expand Up @@ -63,19 +62,18 @@ void Level1Quantizer::train_q1(
size_t d = quantizer->d;
if (quantizer->is_trained && (quantizer->ntotal == nlist)) {
if (verbose)
fmt::print("IVF quantizer does not need training.\n");
printf("IVF quantizer does not need training.\n");
} else if (quantizer_trains_alone == 1) {
if (verbose)
fmt::print("IVF quantizer trains alone...\n");
printf("IVF quantizer trains alone...\n");
quantizer->train(n, x);
quantizer->verbose = verbose;
FAISS_THROW_IF_NOT_MSG(
quantizer->ntotal == nlist,
"nlist not consistent with quantizer size");
} else if (quantizer_trains_alone == 0) {
if (verbose)
fmt::print(
"Training level-1 quantizer on {} vectors in {}D\n", n, d);
printf("Training level-1 quantizer on %zd vectors in %zdD\n", n, d);

Clustering clus(d, nlist, cp);
quantizer->reset();
Expand All @@ -88,11 +86,10 @@ void Level1Quantizer::train_q1(
quantizer->is_trained = true;
} else if (quantizer_trains_alone == 2) {
if (verbose) {
fmt::print(
"Training L2 quantizer on {} vectors in {}D{}\n",
n,
d,
clustering_index ? "(user provided index)" : "");
printf("Training L2 quantizer on %zd vectors in %zdD%s\n",
n,
d,
clustering_index ? "(user provided index)" : "");
}
// also accept spherical centroids because in that case
// L2 and IP are equivalent
Expand All @@ -108,11 +105,11 @@ void Level1Quantizer::train_q1(
clus.train(n, x, *clustering_index);
}
if (verbose) {
fmt::print("Adding centroids to quantizer\n");
printf("Adding centroids to quantizer\n");
}
if (!quantizer->is_trained) {
if (verbose) {
fmt::print("But training it first on centroids table...\n");
printf("But training it first on centroids table...\n");
}
quantizer->train(nlist, clus.centroids.data());
}
Expand Down Expand Up @@ -213,10 +210,9 @@ void IndexIVF::add_core(
for (idx_t i0 = 0; i0 < n; i0 += bs) {
idx_t i1 = std::min(n, i0 + bs);
if (verbose) {
fmt::print(
" IndexIVF::add_with_ids %" PRId64 ":%" PRId64 "\n",
i0,
i1);
printf(" IndexIVF::add_with_ids %" PRId64 ":%" PRId64 "\n",
i0,
i1);
}
add_core(
i1 - i0,
Expand Down Expand Up @@ -265,11 +261,10 @@ void IndexIVF::add_core(
}

if (verbose) {
fmt::print(
" added {} / %" PRId64 " vectors ({} -1s)\n",
nadd,
n,
nminus1);
printf(" added %zd / %" PRId64 " vectors (%zd -1s)\n",
nadd,
n,
nminus1);
}

ntotal += n;
Expand Down Expand Up @@ -1133,13 +1128,13 @@ void IndexIVF::update_vectors(int n, const idx_t* new_ids, const float* x) {

void IndexIVF::train(idx_t n, const float* x) {
if (verbose) {
fmt::print("Training level-1 quantizer\n");
printf("Training level-1 quantizer\n");
}

train_q1(n, x, verbose, metric_type);

if (verbose) {
fmt::print("Training IVF residual\n");
printf("Training IVF residual\n");
}

// optional subsampling
Expand Down Expand Up @@ -1176,7 +1171,7 @@ void IndexIVF::train_encoder(
const idx_t* assign) {
// does nothing by default
if (verbose) {
fmt::print("IndexIVF: no residual training\n");
printf("IndexIVF: no residual training\n");
}
}

Expand Down
Loading

0 comments on commit 5b6c4b4

Please sign in to comment.