Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Enable the users to disable optional dependencies #1074

Closed
wants to merge 8 commits into from
24 changes: 20 additions & 4 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,18 @@ project( Caffe )
### Build Options ##########################################################################

option(CPU_ONLY "Build Caffe without GPU support" OFF)

option(WITH_CUDA "Include NVIDIA CUDA support" ON)
option(WITH_CUDNN "Include NVIDIA CUDNN support" OFF)
option(WITH_HDF5 "Include HDF5 support" ON)
option(WITH_LEVELDB "Include LEVELDB support" ON)
option(WITH_LMDB "Include LMDB support" ON)

option(BUILD_EXAMPLES "Build examples" ON)
option(BUILD_PYTHON "Build Python wrapper" OFF)
option(BUILD_MATLAB "Build Matlab wrapper" OFF)
option(BUILD_EXAMPLES "Build examples" ON)
option(BUILD_SHARED_LIBS "Build SHARED libs if ON and STATIC otherwise" OFF)
option(BUILD_TESTS "Build tests" ON)

if(NOT BLAS)
set(BLAS atlas)
Expand All @@ -31,7 +39,8 @@ set(CMAKE_CXX_FLAGS_RELEASE ${CMAKE_CXX_FLAGS_RELEASE}) # set release flags

# Global Definitions
if(CPU_ONLY)
add_definitions(-DCPU_ONLY)
message("CPU_ONLY is deprecated, use WITH_CUDA=ON or WITH_CUDA=OFF")
set(WITH_CUDA OFF)
endif()

# Include Directories
Expand All @@ -46,14 +55,16 @@ set(CMAKE_SCRIPT_DIR ${CMAKE_SOURCE_DIR}/CMakeScripts)
set( CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_SCRIPT_DIR})

# CUDA is required globally
if(NOT CPU_ONLY)
if(WITH_CUDA)
message(STATUS "NVIDIA CUDA enabled")
find_package(CUDA 5.5 REQUIRED)
include_directories(${CUDA_INCLUDE_DIRS})
else()
add_definitions(-DCPU_ONLY)
endif()

### Subdirectories ##########################################################################

add_subdirectory(src/gtest)
add_subdirectory(src/caffe)
add_subdirectory(tools)

Expand All @@ -72,6 +83,11 @@ if(BUILD_MATLAB)
add_subdirectory(matlab)
endif()

if(BUILD_TESTS)
message(STATUS "Tests enabled")
add_subdirectory(src/gtest)
endif()

### Lint Target Setup ##########################################################################

set(LINT_TARGET lint)
Expand Down
19 changes: 16 additions & 3 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -166,10 +166,8 @@ ifneq ($(CPU_ONLY), 1)
LIBRARIES := cudart cublas curand
endif
LIBRARIES += pthread \
glog gflags protobuf leveldb snappy \
lmdb \
glog gflags protobuf \
boost_system \
hdf5_hl hdf5 \
opencv_core opencv_highgui opencv_imgproc
PYTHON_LIBRARIES := boost_python python2.7
WARNINGS := -Wall -Wno-sign-compare
Expand Down Expand Up @@ -309,6 +307,21 @@ endif
INCLUDE_DIRS += $(BLAS_INCLUDE)
LIBRARY_DIRS += $(BLAS_LIB)

# Optional dependencies: each WITH_* variable defaults to enabled; set it to 0
# on the make command line (e.g. `make WITH_HDF5=0`) to drop the dependency.
# Each block adds the matching HAVE_* preprocessor define and link libraries.

ifneq ($(WITH_HDF5), 0)
COMMON_FLAGS += -DHAVE_HDF5
LIBRARIES += hdf5_hl hdf5
endif

# BUG FIX: this block previously tested $(WITH_LMDB), so LevelDB could not be
# toggled independently of LMDB. It must be guarded by WITH_LEVELDB.
ifneq ($(WITH_LEVELDB), 0)
COMMON_FLAGS += -DHAVE_LEVELDB
LIBRARIES += leveldb snappy
endif

ifneq ($(WITH_LMDB), 0)
COMMON_FLAGS += -DHAVE_LMDB
LIBRARIES += lmdb
endif

# Complete build flags.
COMMON_FLAGS += $(foreach includedir,$(INCLUDE_DIRS),-I$(includedir))
CXXFLAGS += -pthread -fPIC $(COMMON_FLAGS) $(WARNINGS)
Expand Down
25 changes: 21 additions & 4 deletions docs/installation.md
Original file line number Diff line number Diff line change
Expand Up @@ -15,11 +15,28 @@ We have installed Caffe on Ubuntu 14.04, Ubuntu 12.04, OS X 10.9, and OS X 10.8.

Caffe depends on several software packages.

* [CUDA](https://developer.nvidia.com/cuda-zone) library version 6.5 (recommended), 6.0, 5.5, or 5.0 and the latest driver version for CUDA 6 or 319.* for CUDA 5 (and NOT 331.*)
* [BLAS](http://en.wikipedia.org/wiki/Basic_Linear_Algebra_Subprograms) (provided via ATLAS, MKL, or OpenBLAS).
* [OpenCV](http://opencv.org/).
#### Required

* [BLAS](http://en.wikipedia.org/wiki/Basic_Linear_Algebra_Subprograms) (provided via ATLAS, MKL, or OpenBLAS)
* [Boost](http://www.boost.org/) (>= 1.55, although only 1.55 is tested)
* `glog`, `gflags`, `protobuf`, `leveldb`, `snappy`, `hdf5`, `lmdb`
* [gflags](http://code.google.com/p/gflags/)
* [glog](http://code.google.com/p/google-glog/)
* [OpenCV](http://opencv.org/)
* [Protocol Buffers](http://code.google.com/p/protobuf/)

#### Optional

* [CUDA](https://developer.nvidia.com/cuda-zone) library version 6.5 (recommended), 6.0, 5.5, or 5.0 and the latest driver version for CUDA 6 or 319.* for CUDA 5 (and NOT 331.*)
+ Enabled by default. You can disable it with the CMake flag: -DWITH_CUDA=OFF
* [cuDNN](https://developer.nvidia.com/cuDNN)
+ Disabled by default. You can enable it with the CMake flag: -DWITH_CUDNN=ON
* [HDF5](http://www.hdfgroup.org/HDF5/)
+ Enabled by default. You can disable it with the CMake flag: -DWITH_HDF5=OFF
* [LevelDB](http://code.google.com/p/leveldb/) and its dependency [snappy](http://code.google.com/p/snappy/).
+ Enabled by default. You can disable it with the CMake flag: -DWITH_LEVELDB=OFF
* [LMDB](http://symas.com/mdb/)
+ Enabled by default. You can disable it with the CMake flag: -DWITH_LMDB=OFF

* For the Python wrapper
* `Python 2.7`, `numpy (>= 1.7)`, boost-provided `boost.python`
* For the MATLAB wrapper
Expand Down
17 changes: 17 additions & 0 deletions examples/cifar10/convert_cifar_data.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,9 @@

#include "glog/logging.h"
#include "google/protobuf/text_format.h"
#ifdef HAVE_LEVELDB
#include "leveldb/db.h"
#endif
#include "stdint.h"

#include "caffe/proto/caffe.pb.h"
Expand All @@ -32,10 +34,12 @@ void read_image(std::ifstream* file, int* label, char* buffer) {
}

void convert_dataset(const string& input_folder, const string& output_folder) {
#ifdef HAVE_LEVELDB
// Leveldb options
leveldb::Options options;
options.create_if_missing = true;
options.error_if_exists = true;
#endif
// Data buffer
int label;
char str_buffer[kCIFARImageNBytes];
Expand All @@ -46,11 +50,13 @@ void convert_dataset(const string& input_folder, const string& output_folder) {
datum.set_width(kCIFARSize);

LOG(INFO) << "Writing Training data";
#ifdef HAVE_LEVELDB
leveldb::DB* train_db;
leveldb::Status status;
status = leveldb::DB::Open(options, output_folder + "/cifar10_train_leveldb",
&train_db);
CHECK(status.ok()) << "Failed to open leveldb.";
#endif
for (int fileid = 0; fileid < kCIFARTrainBatches; ++fileid) {
// Open files
LOG(INFO) << "Training Batch " << fileid + 1;
Expand All @@ -65,14 +71,18 @@ void convert_dataset(const string& input_folder, const string& output_folder) {
datum.SerializeToString(&value);
snprintf(str_buffer, kCIFARImageNBytes, "%05d",
fileid * kCIFARBatchSize + itemid);
#ifdef HAVE_LEVELDB
train_db->Put(leveldb::WriteOptions(), string(str_buffer), value);
#endif
}
}

LOG(INFO) << "Writing Testing data";
#ifdef HAVE_LEVELDB
leveldb::DB* test_db;
CHECK(leveldb::DB::Open(options, output_folder + "/cifar10_test_leveldb",
&test_db).ok()) << "Failed to open leveldb.";
#endif
// Open files
std::ifstream data_file((input_folder + "/test_batch.bin").c_str(),
std::ios::in | std::ios::binary);
Expand All @@ -83,14 +93,21 @@ void convert_dataset(const string& input_folder, const string& output_folder) {
datum.set_data(str_buffer, kCIFARImageNBytes);
datum.SerializeToString(&value);
snprintf(str_buffer, kCIFARImageNBytes, "%05d", itemid);
#ifdef HAVE_LEVELDB
test_db->Put(leveldb::WriteOptions(), string(str_buffer), value);
#endif
}

#ifdef HAVE_LEVELDB
delete train_db;
delete test_db;
#endif
}

int main(int argc, char** argv) {
#ifndef HAVE_LEVELDB
LOG(FATAL) << "No DB library available to save the converted dataset";
#endif
if (argc != 3) {
printf("This script converts the CIFAR dataset to the leveldb format used\n"
"by caffe to perform classification.\n"
Expand Down
83 changes: 73 additions & 10 deletions examples/mnist/convert_mnist_data.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,14 @@
#include <gflags/gflags.h>
#include <glog/logging.h>
#include <google/protobuf/text_format.h>
#ifdef HAVE_LEVELDB
#include <leveldb/db.h>
#include <leveldb/write_batch.h>
#endif

#ifdef HAVE_LMDB
#include <lmdb.h>
#endif
#include <stdint.h>
#include <sys/stat.h>

Expand All @@ -24,7 +29,13 @@
using namespace caffe; // NOLINT(build/namespaces)
using std::string;

#if defined HAVE_LMDB
DEFINE_string(backend, "lmdb", "The backend for storing the result");
#elif defined HAVE_LEVELDB
DEFINE_string(backend, "leveldb", "The backend for storing the result");
#else
DEFINE_string(backend, "", "The backend for storing the result");
#endif

uint32_t swap_endian(uint32_t val) {
val = ((val << 8) & 0xFF00FF00) | ((val >> 8) & 0xFF00FF);
Expand Down Expand Up @@ -61,27 +72,38 @@ void convert_dataset(const char* image_filename, const char* label_filename,
image_file.read(reinterpret_cast<char*>(&cols), 4);
cols = swap_endian(cols);

// lmdb
MDB_env *mdb_env;
MDB_dbi mdb_dbi;
MDB_val mdb_key, mdb_data;
MDB_txn *mdb_txn;
#ifdef HAVE_LEVELDB
// leveldb
leveldb::DB* db;
leveldb::Options options;
options.error_if_exists = true;
options.create_if_missing = true;
options.write_buffer_size = 268435456;
leveldb::WriteBatch* batch = NULL;
#endif

#ifdef HAVE_LMDB
// lmdb
MDB_env *mdb_env;
MDB_dbi mdb_dbi;
MDB_val mdb_key, mdb_data;
MDB_txn *mdb_txn;
#endif

// Open db
if (db_backend == "leveldb") { // leveldb
if (db_backend == "") {
LOG(FATAL) << "Unknown db backend " << db_backend;
#ifdef HAVE_LEVELDB
} else if (db_backend == "leveldb") { // leveldb
LOG(INFO) << "Opening leveldb " << db_path;
leveldb::Status status = leveldb::DB::Open(
options, db_path, &db);
CHECK(status.ok()) << "Failed to open leveldb " << db_path
<< ". Is it already existing?";
batch = new leveldb::WriteBatch();
#endif

#ifdef HAVE_LMDB
} else if (db_backend == "lmdb") { // lmdb
LOG(INFO) << "Opening lmdb " << db_path;
CHECK_EQ(mkdir(db_path, 0744), 0)
Expand All @@ -95,6 +117,7 @@ void convert_dataset(const char* image_filename, const char* label_filename,
<< "mdb_txn_begin failed";
CHECK_EQ(mdb_open(mdb_txn, NULL, 0, &mdb_dbi), MDB_SUCCESS)
<< "mdb_open failed. Does the lmdb already exist? ";
#endif
} else {
LOG(FATAL) << "Unknown db backend " << db_backend;
}
Expand Down Expand Up @@ -123,45 +146,66 @@ void convert_dataset(const char* image_filename, const char* label_filename,
string keystr(key_cstr);

// Put in db
if (db_backend == "leveldb") { // leveldb
if (db_backend == "") {
LOG(FATAL) << "Unknown db backend " << db_backend;
#ifdef HAVE_LEVELDB
} else if (db_backend == "leveldb") { // leveldb
batch->Put(keystr, value);
#endif

#ifdef HAVE_LMDB
} else if (db_backend == "lmdb") { // lmdb
mdb_data.mv_size = value.size();
mdb_data.mv_data = reinterpret_cast<void*>(&value[0]);
mdb_key.mv_size = keystr.size();
mdb_key.mv_data = reinterpret_cast<void*>(&keystr[0]);
CHECK_EQ(mdb_put(mdb_txn, mdb_dbi, &mdb_key, &mdb_data, 0), MDB_SUCCESS)
<< "mdb_put failed";
#endif
} else {
LOG(FATAL) << "Unknown db backend " << db_backend;
}

if (++count % 1000 == 0) {
// Commit txn
if (db_backend == "leveldb") { // leveldb
if (db_backend == "") {
LOG(FATAL) << "Unknown db backend " << db_backend;
#ifdef HAVE_LEVELDB
} else if (db_backend == "leveldb") { // leveldb
db->Write(leveldb::WriteOptions(), batch);
delete batch;
batch = new leveldb::WriteBatch();
#endif

#ifdef HAVE_LMDB
} else if (db_backend == "lmdb") { // lmdb
CHECK_EQ(mdb_txn_commit(mdb_txn), MDB_SUCCESS)
<< "mdb_txn_commit failed";
CHECK_EQ(mdb_txn_begin(mdb_env, NULL, 0, &mdb_txn), MDB_SUCCESS)
<< "mdb_txn_begin failed";
#endif
} else {
LOG(FATAL) << "Unknown db backend " << db_backend;
}
}
}
// write the last batch
if (count % 1000 != 0) {
if (db_backend == "leveldb") { // leveldb
if (db_backend == "") {
LOG(FATAL) << "Unknown db backend " << db_backend;
#ifdef HAVE_LEVELDB
} else if (db_backend == "leveldb") { // leveldb
db->Write(leveldb::WriteOptions(), batch);
delete batch;
delete db;
#endif

#ifdef HAVE_LMDB
} else if (db_backend == "lmdb") { // lmdb
CHECK_EQ(mdb_txn_commit(mdb_txn), MDB_SUCCESS) << "mdb_txn_commit failed";
mdb_close(mdb_env, mdb_dbi);
mdb_env_close(mdb_env);
#endif
} else {
LOG(FATAL) << "Unknown db backend " << db_backend;
}
Expand All @@ -171,12 +215,31 @@ void convert_dataset(const char* image_filename, const char* label_filename,
}

int main(int argc, char** argv) {
#ifndef HAVE_LEVELDB
#ifndef HAVE_LMDB
LOG(FATAL) << "No DB library available to save the converted dataset";
#endif
#endif

#ifndef GFLAGS_GFLAGS_H_
namespace gflags = google;
#endif

gflags::SetUsageMessage("This script converts the MNIST dataset to\n"
"the leveldb/lmdb format used by Caffe to perform classification.\n"
"the "
#ifdef HAVE_LEVELDB
"leveldb"
#ifdef HAVE_LMDB
"/lmdb"
#endif
#else
#ifdef HAVE_LMDB
"lmdb"
#else
"non-specified"
#endif
#endif
" format used by Caffe to perform classification.\n"
"Usage:\n"
" convert_mnist_data [FLAGS] input_image_file input_label_file "
"output_db_file\n"
Expand Down
Loading