Remove tensorflow::setLogging() as thread-unsafe
setLogging() calls setenv(), which is not required to be thread-safe and, specifically in glibc, leads to a race condition with any concurrent getenv() calls.
makortel committed Sep 23, 2024
1 parent 5302658 commit 34d6223
Showing 19 changed files with 0 additions and 34 deletions.
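For context (not part of the commit itself): POSIX does not require setenv() to be thread-safe, and in glibc a concurrent getenv() may read the environment array while setenv() is rewriting it. The minimal sketch below is illustrative only — it is not code from CMSSW — and shows the kind of pattern the commit message describes.

// Illustrative sketch only (not from this commit): one thread calling setenv()
// while another calls getenv(). POSIX does not require setenv() to be
// thread-safe, so this program has a data race / undefined behaviour.
#include <cstdlib>
#include <thread>

int main() {
  std::thread writer([] {
    for (int i = 0; i < 1000; ++i)
      setenv("TF_CPP_MIN_LOG_LEVEL", "3", 0);  // mutates the process environment
  });
  std::thread reader([] {
    for (int i = 0; i < 1000; ++i)
      (void)getenv("TF_CPP_MIN_LOG_LEVEL");    // may read the environment mid-update
  });
  writer.join();
  reader.join();
  return 0;
}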
1 change: 0 additions & 1 deletion DQM/DTMonitorClient/src/DTOccupancyTestML.cc
@@ -125,7 +125,6 @@ void DTOccupancyTestML::dqmEndLuminosityBlock(DQMStore::IBooker& ibooker,
vector<const DTChamber*> chambers = muonGeom->chambers();

// Load graph
tensorflow::setLogging("3");
edm::FileInPath modelFilePath("DQM/DTMonitorClient/data/occupancy_cnn_v1.pb");
tensorflow::GraphDef* graphDef = tensorflow::loadGraphDef(modelFilePath.fullPath());

1 change: 0 additions & 1 deletion L1Trigger/L1CaloTrigger/plugins/L1NNCaloTauEmulator.cc
@@ -717,7 +717,6 @@ void L1NNCaloTauEmulator::produce(edm::Event& iEvent, const edm::EventSetup& eSe
} // End while loop of barrel TowerClusters creation

// Barrel TauMinator application
tensorflow::setLogging("2");
int batchSize_CB = (int)(Nclusters_CB);
tensorflow::TensorShape imageShape_CB({batchSize_CB, IEta_dim, IPhi_dim, 2});
tensorflow::TensorShape positionShape_CB({batchSize_CB, 2});
1 change: 0 additions & 1 deletion L1Trigger/L1CaloTrigger/plugins/L1NNCaloTauProducer.cc
@@ -582,7 +582,6 @@ void L1NNCaloTauProducer::produce(edm::Event& iEvent, const edm::EventSetup& eSe
} // End while loop of endcap TowerClusters creation

// Barrel TauMinator application
tensorflow::setLogging("2");
int batchSize_CB = (int)(l1TowerClustersNxM_CB.size());
tensorflow::TensorShape imageShape_CB({batchSize_CB, IEta_dim, IPhi_dim, 2});
tensorflow::TensorShape positionShape_CB({batchSize_CB, 2});
@@ -61,8 +61,6 @@ HGCalConcentratorAutoEncoderImpl::HGCalConcentratorAutoEncoderImpl(const edm::Pa
}
}

tensorflow::setLogging("0");

for (const auto& modelFilePset : modelFilePaths_) {
std::string encoderPath = modelFilePset.getParameter<edm::FileInPath>("encoderModelFile").fullPath();
std::string decoderPath = modelFilePset.getParameter<edm::FileInPath>("decoderModelFile").fullPath();
1 change: 0 additions & 1 deletion L1Trigger/Phase2L1ParticleFlow/plugins/L1NNTauProducer.cc
@@ -81,7 +81,6 @@ L1NNTauProducer::L1NNTauProducer(const edm::ParameterSet& cfg, const tensorflow:
}

std::unique_ptr<tensorflow::SessionCache> L1NNTauProducer::initializeGlobalCache(const edm::ParameterSet& cfg) {
tensorflow::setLogging("3");
std::string graphPath = edm::FileInPath(cfg.getParameter<std::string>("NNFileName")).fullPath();
return std::make_unique<tensorflow::SessionCache>(graphPath);
}
3 changes: 0 additions & 3 deletions PhysicsTools/TensorFlow/interface/TensorFlow.h
@@ -57,9 +57,6 @@ namespace tensorflow {
Backend getBackend() const { return _backend; };
};

// set the tensorflow log level
void setLogging(const std::string& level = "3");

// loads a meta graph definition saved at exportDir using the SavedModel interface for a tag and
// predefined options
// transfers ownership
10 changes: 0 additions & 10 deletions PhysicsTools/TensorFlow/src/TensorFlow.cc
@@ -87,16 +87,6 @@ namespace tensorflow {
}
}

void setLogging(const std::string& level) {
/*
* 0 = all messages are logged (default behavior)
* 1 = INFO messages are not printed
* 2 = INFO and WARNING messages are not printed
* 3 = INFO, WARNING, and ERROR messages are not printed
*/
setenv("TF_CPP_MIN_LOG_LEVEL", level.c_str(), 0);
}

MetaGraphDef* loadMetaGraphDef(const std::string& exportDir, const std::string& tag) {
Options default_options{};
return loadMetaGraphDef(exportDir, tag, default_options);
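A note on what was removed above: the helper simply exported TF_CPP_MIN_LOG_LEVEL via setenv(). If the same log suppression is still wanted, a thread-safe alternative (an assumption on my part, not something this commit adds) is to set the variable before any threads exist — either in the shell that launches the job (export TF_CPP_MIN_LOG_LEVEL=3) or at the very top of main(). A minimal sketch, assuming nothing CMSSW-specific:

#include <cstdlib>

int main(int argc, char** argv) {
  // Safe only because no other threads have been started yet; the variable
  // name comes from the helper removed above, everything else is illustrative.
  setenv("TF_CPP_MIN_LOG_LEVEL", "3", /*overwrite=*/0);

  // ... initialise the framework and spawn worker threads afterwards ...
  return 0;
}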
1 change: 0 additions & 1 deletion PhysicsTools/TensorFlow/test/testConstSessionCUDA.cc
@@ -54,7 +54,6 @@ process.add_(cms.Service('CUDAService'))

// load the graph
std::string pbFile = dataPath_ + "/constantgraph.pb";
tensorflow::setLogging();
tensorflow::Options options{backend};

tensorflow::GraphDef* graphDef = tensorflow::loadGraphDef(pbFile);
1 change: 0 additions & 1 deletion PhysicsTools/TensorFlow/test/testGraphLoadingCUDA.cc
@@ -54,7 +54,6 @@ process.add_(cms.Service('CUDAService'))

// load the graph
std::string pbFile = dataPath_ + "/constantgraph.pb";
tensorflow::setLogging();
tensorflow::Options options{backend};
tensorflow::GraphDef* graphDef = tensorflow::loadGraphDef(pbFile);
CPPUNIT_ASSERT(graphDef != nullptr);
1 change: 0 additions & 1 deletion PhysicsTools/TensorFlow/test/testHelloWorld.cc
@@ -37,7 +37,6 @@ void testHelloWorld::test() {
// object to load and run the graph / session
tensorflow::Status status;
tensorflow::Options options{backend};
tensorflow::setLogging();
tensorflow::RunOptions runOptions;
tensorflow::SavedModelBundle bundle;

1 change: 0 additions & 1 deletion PhysicsTools/TensorFlow/test/testHelloWorldCUDA.cc
@@ -57,7 +57,6 @@ process.add_(cms.Service('CUDAService'))
// object to load and run the graph / session
tensorflow::Status status;
tensorflow::Options options{backend};
tensorflow::setLogging("0");
tensorflow::RunOptions runOptions;
tensorflow::SavedModelBundle bundle;

1 change: 0 additions & 1 deletion PhysicsTools/TensorFlow/test/testMetaGraphLoadingCUDA.cc
@@ -54,7 +54,6 @@ process.add_(cms.Service('CUDAService'))

// load the graph
std::string exportDir = dataPath_ + "/simplegraph";
tensorflow::setLogging();
tensorflow::Options options{backend};
tensorflow::MetaGraphDef* metaGraphDef = tensorflow::loadMetaGraphDef(exportDir);
CPPUNIT_ASSERT(metaGraphDef != nullptr);
1 change: 0 additions & 1 deletion PhysicsTools/TensorFlow/test/testSessionCacheCUDA.cc
@@ -53,7 +53,6 @@ process.add_(cms.Service('CUDAService'))

// load the graph and the session
std::string pbFile = dataPath_ + "/constantgraph.pb";
tensorflow::setLogging();
tensorflow::Options options{backend};

// load the graph and the session
1 change: 0 additions & 1 deletion PhysicsTools/TensorFlow/test/testThreadPoolsCUDA.cc
@@ -60,7 +60,6 @@ process.add_(cms.Service('CUDAService'))

// load the graph
std::string pbFile = dataPath_ + "/constantgraph.pb";
tensorflow::setLogging();
tensorflow::GraphDef* graphDef = tensorflow::loadGraphDef(pbFile);
CPPUNIT_ASSERT(graphDef != nullptr);

2 changes: 0 additions & 2 deletions PhysicsTools/TensorFlow/test/testVisibleDevicesCUDA.cc
@@ -55,11 +55,9 @@ process.add_(cms.Service('CUDAService'))
std::cout << "Testing CUDA backend" << std::endl;
tensorflow::Backend backend = tensorflow::Backend::cuda;
tensorflow::Options options{backend};
tensorflow::setLogging("0");

// load the graph
std::string pbFile = dataPath_ + "/constantgraph.pb";
tensorflow::setLogging();
tensorflow::GraphDef* graphDef = tensorflow::loadGraphDef(pbFile);
CPPUNIT_ASSERT(graphDef != nullptr);

1 change: 0 additions & 1 deletion RecoEcal/EgammaCoreTools/src/DeepSCGraphEvaluation.cc
@@ -28,7 +28,6 @@ const std::vector<std::string> DeepSCGraphEvaluation::availableWindowInputs = {
const std::vector<std::string> DeepSCGraphEvaluation::availableHitsInputs = {"ieta", "iphi", "iz", "en_withfrac"};

DeepSCGraphEvaluation::DeepSCGraphEvaluation(const DeepSCConfiguration& cfg) : cfg_(cfg) {
tensorflow::setLogging("0");
// Init TF graph and session objects
initTensorFlowGraphAndSession();
// Init scaler configs
1 change: 0 additions & 1 deletion RecoMuon/TrackerSeedGenerator/plugins/TSGForOIDNN.cc
@@ -208,7 +208,6 @@ TSGForOIDNN::TSGForOIDNN(const edm::ParameterSet& iConfig)
if (getStrategyFromDNN_) {
edm::FileInPath dnnMetadataPath(dnnMetadataPath_);
pt::read_json(dnnMetadataPath.fullPath(), metadata_);
tensorflow::setLogging("3");

if (useRegressor_) {
// use regressor
2 changes: 0 additions & 2 deletions RecoTauTag/HLTProducers/src/L2TauTagNNProducer.cc
@@ -234,8 +234,6 @@ std::unique_ptr<L2TauNNProducerCacheData> L2TauNNProducer::initializeGlobalCache
cacheData->graphDef = tensorflow::loadGraphDef(graphPath);
cacheData->session = tensorflow::createSession(cacheData->graphDef);

tensorflow::setLogging("2");

boost::property_tree::ptree loadPtreeRoot;
auto const normalizationDict = edm::FileInPath(cfg.getParameter<std::string>("normalizationDict")).fullPath();
boost::property_tree::read_json(normalizationDict, loadPtreeRoot);
2 changes: 0 additions & 2 deletions RecoTauTag/HLTProducers/src/L2TauTagNNProducerAlpaka.cc
@@ -233,8 +233,6 @@ std::unique_ptr<L2TauNNProducerAlpakaCacheData> L2TauNNProducerAlpaka::initializ
cacheData->graphDef = tensorflow::loadGraphDef(graphPath);
cacheData->session = tensorflow::createSession(cacheData->graphDef);

tensorflow::setLogging("2");

boost::property_tree::ptree loadPtreeRoot;
auto const normalizationDict = edm::FileInPath(cfg.getParameter<std::string>("normalizationDict")).fullPath();
boost::property_tree::read_json(normalizationDict, loadPtreeRoot);