
Commit c6742c5

readd mkldnn api in paddle/fluid/inference/api/ [fluid_ops] (#74230)
* Fix
* fix
1 parent aa06d8f commit c6742c5

8 files changed, +140 -12 lines


paddle/fluid/inference/api/paddle_analysis_config.h

Lines changed: 39 additions & 3 deletions
@@ -869,13 +869,13 @@ struct PD_INFER_DECL AnalysisConfig {
   /// \brief Turn on OneDNN.
   ///
   ///
-  void EnableONEDNN();
+  void EnableMKLDNN();  // deprecated

   ///
   /// \brief Turn down OneDNN.
   ///
   ///
-  void DisableONEDNN();
+  void DisableMKLDNN();  // deprecated

   ///
   /// \brief Set the cache capacity of different input shapes for OneDNN.
@@ -885,14 +885,37 @@
   ///
   /// \param capacity The cache capacity.
   ///
-  void SetOnednnCacheCapacity(int capacity);
+  void SetMkldnnCacheCapacity(int capacity);  // deprecated
+
   ///
   /// \brief A boolean state telling whether to use the OneDNN.
   ///
   /// \return bool Whether to use the OneDNN.
   ///
   bool mkldnn_enabled() const { return use_onednn_; }

+  ///
+  /// \brief Turn on OneDNN.
+  ///
+  ///
+  void EnableONEDNN();
+
+  ///
+  /// \brief Turn down OneDNN.
+  ///
+  ///
+  void DisableONEDNN();
+
+  ///
+  /// \brief Set the cache capacity of different input shapes for OneDNN.
+  /// Default value 0 means not caching any shape.
+  /// Please see MKL-DNN Data Caching Design Document:
+  /// https://github.com/PaddlePaddle/FluidDoc/blob/develop/doc/fluid/design/mkldnn/caching/caching.md
+  ///
+  /// \param capacity The cache capacity.
+  ///
+  void SetOnednnCacheCapacity(int capacity);
+
   ///
   /// \brief Set the number of cpu math library threads.
   ///
@@ -921,6 +944,14 @@
   ///
   /// \param op_list The operator type list.
   ///
+  void SetMKLDNNOp(std::unordered_set<std::string> op_list) {  // deprecated
+    onednn_enabled_op_types_ = op_list;
+  }
+  ///
+  /// \brief Specify the operator type list to use OneDNN acceleration.
+  ///
+  /// \param op_list The operator type list.
+  ///
   void SetONEDNNOp(std::unordered_set<std::string> op_list) {
     onednn_enabled_op_types_ = op_list;
   }
@@ -945,6 +976,11 @@
   ///
   void EnableMkldnnBfloat16();

+  ///
+  /// \brief Turn off OneDNN fc passes.
+  ///
+  void DisableMkldnnFcPasses();  // deprecated
+
   ///
   /// \brief Turn off OneDNN fc passes.
   ///
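For context, a minimal caller-side sketch of how the re-added names relate to the OneDNN ones. This is an illustration, not part of the commit: it assumes a Paddle Inference build with oneDNN support, uses the in-repo header path, and the helper name ConfigureCpuOneDnn is invented. Based on the forwarding pattern shown in paddle_pass_builder.cc below, the MKLDNN spellings are expected to log ONEDNN_UPDATE_WARNING and delegate to their ONEDNN counterparts.

    #include "paddle/fluid/inference/api/paddle_analysis_config.h"

    void ConfigureCpuOneDnn(paddle::AnalysisConfig* config) {
      // Preferred spellings after this change.
      config->EnableONEDNN();
      config->SetOnednnCacheCapacity(10);  // cache up to 10 input shapes

      // Deprecated spellings re-added by this commit for source compatibility;
      // they keep old call sites compiling while emitting a deprecation warning.
      // config->EnableMKLDNN();
      // config->SetMkldnnCacheCapacity(10);
    }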

paddle/fluid/inference/api/paddle_api.h

Lines changed: 3 additions & 0 deletions
@@ -34,6 +34,9 @@
 #include "paddle_tensor.h"  // NOLINT
 /*! \namespace paddle
 */
+#define ONEDNN_UPDATE_WARNING(api)                                       \
+  "Warning: The api is deprecated since version 3.x, please use onednn " \
+  "api " #api "."
 namespace paddle {

 using PaddleDType = paddle_infer::DataType;
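As a side note, the macro relies only on standard preprocessor behavior (adjacent string literals concatenate, and #api stringizes the argument), so a standalone sketch outside Paddle shows exactly what the wrappers log:

    #include <iostream>

    #define ONEDNN_UPDATE_WARNING(api)                                       \
      "Warning: The api is deprecated since version 3.x, please use onednn " \
      "api " #api "."

    int main() {
      // Prints:
      // Warning: The api is deprecated since version 3.x, please use onednn api EnableONEDNN.
      std::cout << ONEDNN_UPDATE_WARNING(EnableONEDNN) << "\n";
      return 0;
    }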

paddle/fluid/inference/api/paddle_pass_builder.cc

Lines changed: 21 additions & 1 deletion
@@ -27,7 +27,7 @@

 #include <algorithm>
 #include <sstream>
-
+#include "paddle/fluid/inference/api/paddle_api.h"
 namespace paddle {

 void PaddlePassBuilder::AppendPass(const std::string &pass_type) {
@@ -317,6 +317,10 @@ void GpuPassStrategy::EnableCUDNN() {
   use_cudnn_ = true;
 }

+void GpuPassStrategy::EnableMKLDNN() {
+  LOG(WARNING) << ONEDNN_UPDATE_WARNING(EnableONEDNN);
+  EnableONEDNN();
+}
 void GpuPassStrategy::EnableONEDNN() {
   LOG(ERROR) << "GPU not support MKLDNN yet";
 }
@@ -329,6 +333,10 @@ void GpuPassStrategy::EnableMkldnnInt8() {
   LOG(ERROR) << "GPU not support MKL-DNN int8";
 }

+void GpuPassStrategy::DisableMkldnnFcPasses() {
+  LOG(WARNING) << ONEDNN_UPDATE_WARNING(DisableOnednnFcPasses);
+  DisableOnednnFcPasses();
+}
 void GpuPassStrategy::DisableOnednnFcPasses() {
   LOG(ERROR) << "GPU not support MKL-DNN fc";
 }
@@ -343,6 +351,10 @@ CpuPassStrategy::CpuPassStrategy() : PassStrategy({}) {

 void CpuPassStrategy::EnableCUDNN() { LOG(ERROR) << "CPU not support cuDNN"; }

+void CpuPassStrategy::EnableMKLDNN() {
+  LOG(WARNING) << ONEDNN_UPDATE_WARNING(EnableONEDNN);
+  EnableONEDNN();
+}
 void CpuPassStrategy::EnableONEDNN() {
 // TODO(Superjomn) Consider the way to mix CPU with GPU.
 #ifdef PADDLE_WITH_DNNL
@@ -389,6 +401,10 @@ void CpuPassStrategy::EnableONEDNN() {
 #endif
 }

+void CpuPassStrategy::DisableMKLDNN() {
+  LOG(WARNING) << ONEDNN_UPDATE_WARNING(DisableONEDNN);
+  DisableONEDNN();
+}
 void CpuPassStrategy::DisableONEDNN() {
   ClearPasses();
   passes_.assign(CpuBasicPasses.begin(), CpuBasicPasses.end());
@@ -475,6 +491,10 @@ void CpuPassStrategy::EnableMkldnnInt8() {
 #endif
 }

+void CpuPassStrategy::DisableMkldnnFcPasses() {
+  LOG(WARNING) << ONEDNN_UPDATE_WARNING(DisableOnednnFcPasses);
+  DisableOnednnFcPasses();
+}
 void CpuPassStrategy::DisableOnednnFcPasses() {
 #ifdef PADDLE_WITH_DNNL
   if (!disable_onednn_fc_passes_) {

paddle/fluid/inference/api/paddle_pass_builder.h

Lines changed: 31 additions & 5 deletions
@@ -142,17 +142,28 @@ class PD_INFER_DECL PassStrategy : public PaddlePassBuilder {
   /// \brief Enable the use of OneDNN.
   /// The OneDNN control exists in both CPU and GPU mode, because there can
   /// still be some CPU kernels running in GPU mode.
-  virtual void EnableONEDNN() {}
+  virtual void EnableMKLDNN() {}  // deprecated

   /// \brief Disable the use of OneDNN.
-  virtual void DisableONEDNN() {}
+  virtual void DisableMKLDNN() {}  // deprecated

   /// \brief Enable OneDNN bfloat16.
   virtual void EnableMkldnnBfloat16() {}

   /// \brief Enable OneDNN int8.
   virtual void EnableMkldnnInt8() {}

+  /// \brief Disable OneDNN fc passes.
+  virtual void DisableMkldnnFcPasses() {}  // deprecated
+
+  /// \brief Enable the use of OneDNN.
+  /// The OneDNN control exists in both CPU and GPU mode, because there can
+  /// still be some CPU kernels running in GPU mode.
+  virtual void EnableONEDNN() {}
+
+  /// \brief Disable the use of OneDNN.
+  virtual void DisableONEDNN() {}
+
   /// \brief Disable OneDNN fc passes.
   virtual void DisableOnednnFcPasses() {}

@@ -211,17 +222,26 @@ class PD_INFER_DECL CpuPassStrategy : public PassStrategy {
   void EnableCUDNN() override;

   /// \brief Enable the use of OneDNN.
-  void EnableONEDNN() override;
+  void EnableMKLDNN() override;  // deprecated

   /// \brief Disable the use of OneDNN.
-  void DisableONEDNN() override;
+  void DisableMKLDNN() override;  // deprecated

   /// \brief Enable OneDNN bfloat16.
   void EnableMkldnnBfloat16() override;

   /// \brief Enable OneDNN int8.
   void EnableMkldnnInt8() override;

+  /// \brief Disable OneDNN fc passes.
+  void DisableMkldnnFcPasses() override;  // deprecated
+
+  /// \brief Enable the use of OneDNN.
+  void EnableONEDNN() override;
+
+  /// \brief Disable the use of OneDNN.
+  void DisableONEDNN() override;
+
   /// \brief Disable OneDNN fc passes.
   void DisableOnednnFcPasses() override;

@@ -257,14 +277,20 @@ class PD_INFER_DECL GpuPassStrategy : public PassStrategy {
   void EnableCUDNN() override;

   /// \brief Not supported in GPU mode yet.
-  void EnableONEDNN() override;
+  void EnableMKLDNN() override;  // deprecated

   /// \brief Not supported in GPU mode yet.
   void EnableMkldnnBfloat16() override;

   /// \brief Not supported in GPU mode yet.
   void EnableMkldnnInt8() override;

+  /// \brief Disable OneDNN fc passes.
+  void DisableMkldnnFcPasses() override;
+
+  /// \brief Not supported in GPU mode yet.
+  void EnableONEDNN() override;
+
   /// \brief Disable OneDNN fc passes.
   void DisableOnednnFcPasses() override;

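One practical consequence of keeping both spellings virtual: because the deprecated shims in paddle_pass_builder.cc dispatch through the ONEDNN virtuals, a custom strategy only needs to override the new names. A hypothetical sketch, not part of this commit (MyPassStrategy is invented for illustration):

    #include "paddle/fluid/inference/api/paddle_pass_builder.h"

    // Hypothetical subclass for illustration only.
    class MyPassStrategy : public paddle::CpuPassStrategy {
     public:
      void EnableONEDNN() override {
        // Custom pass tweaks could go here; reuse the default CPU OneDNN passes.
        paddle::CpuPassStrategy::EnableONEDNN();
      }
      // No need to override EnableMKLDNN(): the inherited deprecated shim logs a
      // warning and calls the virtual EnableONEDNN(), which dispatches here.
    };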

paddle/fluid/inference/capi/paddle_c_api.h

Lines changed: 9 additions & 3 deletions
@@ -233,14 +233,20 @@ typedef struct PD_MaxInputShape {
 PADDLE_CAPI_EXPORT extern void PD_SwitchIrDebug(PD_AnalysisConfig* config,
                                                 bool x);

-PADDLE_CAPI_EXPORT extern void PD_EnableONEDNN(PD_AnalysisConfig* config);
+PADDLE_CAPI_EXPORT extern void PD_EnableMKLDNN(
+    PD_AnalysisConfig* config);  // deprecated

-PADDLE_CAPI_EXPORT extern void PD_SetOnednnCacheCapacity(
-    PD_AnalysisConfig* config, int capacity);
+PADDLE_CAPI_EXPORT extern void PD_SetMkldnnCacheCapacity(
+    PD_AnalysisConfig* config, int capacity);  // deprecated

 PADDLE_CAPI_EXPORT extern bool PD_MkldnnEnabled(
     const PD_AnalysisConfig* config);

+PADDLE_CAPI_EXPORT extern void PD_EnableONEDNN(PD_AnalysisConfig* config);
+
+PADDLE_CAPI_EXPORT extern void PD_SetOnednnCacheCapacity(
+    PD_AnalysisConfig* config, int capacity);
+
 PADDLE_CAPI_EXPORT extern void PD_SetCpuMathLibraryNumThreads(
     PD_AnalysisConfig* config, int cpu_math_library_num_threads);

paddle/fluid/inference/capi/pd_config.cc

Lines changed: 8 additions & 0 deletions
@@ -277,6 +277,10 @@ void PD_SwitchIrDebug(PD_AnalysisConfig* config, bool x) {
   config->config.SwitchIrDebug(x);
 }

+void PD_EnableMKLDNN(PD_AnalysisConfig* config) {
+  LOG(WARNING) << ONEDNN_UPDATE_WARNING(PD_EnableONEDNN);
+  PD_EnableONEDNN(config);
+}
 void PD_EnableONEDNN(PD_AnalysisConfig* config) {
   PADDLE_ENFORCE_NOT_NULL(
       config,
@@ -285,6 +289,10 @@ void PD_EnableONEDNN(PD_AnalysisConfig* config) {
   config->config.EnableONEDNN();
 }

+void PD_SetMkldnnCacheCapacity(PD_AnalysisConfig* config, int capacity) {
+  LOG(WARNING) << ONEDNN_UPDATE_WARNING(PD_SetOnednnCacheCapacity);
+  PD_SetOnednnCacheCapacity(config, capacity);
+}
 void PD_SetOnednnCacheCapacity(PD_AnalysisConfig* config, int capacity) {
   PADDLE_ENFORCE_NOT_NULL(
       config,

paddle/fluid/inference/capi_exp/pd_config.cc

Lines changed: 11 additions & 0 deletions
@@ -363,10 +363,21 @@ void PD_ConfigSwitchIrDebug(__pd_keep PD_Config* pd_config, PD_Bool x) {
   CHECK_AND_CONVERT_PD_CONFIG;
   config->SwitchIrDebug(x);
 }
+
+void PD_ConfigEnableMKLDNN(__pd_keep PD_Config* pd_config) {
+  LOG(WARNING) << ONEDNN_UPDATE_WARNING(PD_ConfigEnableONEDNN);
+  PD_ConfigEnableONEDNN(pd_config);
+}
 void PD_ConfigEnableONEDNN(__pd_keep PD_Config* pd_config) {
   CHECK_AND_CONVERT_PD_CONFIG;
   config->EnableONEDNN();
 }
+
+void PD_ConfigSetMkldnnCacheCapacity(__pd_keep PD_Config* pd_config,
+                                     int32_t capacity) {
+  LOG(WARNING) << ONEDNN_UPDATE_WARNING(PD_ConfigSetOnednnCacheCapacity);
+  PD_ConfigSetOnednnCacheCapacity(pd_config, capacity);
+}
 void PD_ConfigSetOnednnCacheCapacity(__pd_keep PD_Config* pd_config,
                                      int32_t capacity) {
   CHECK_AND_CONVERT_PD_CONFIG;

paddle/fluid/inference/capi_exp/pd_config.h

Lines changed: 18 additions & 0 deletions
@@ -504,6 +504,13 @@ PADDLE_CAPI_EXPORT extern void PD_ConfigSwitchIrDebug(
 ///
 /// \param[in] pd_config config
 ///
+PADDLE_CAPI_EXPORT extern void PD_ConfigEnableMKLDNN(
+    __pd_keep PD_Config* pd_config);  // deprecated
+///
+/// \brief Turn on OneDNN.
+///
+/// \param[in] pd_config config
+///
 PADDLE_CAPI_EXPORT extern void PD_ConfigEnableONEDNN(
     __pd_keep PD_Config* pd_config);
 ///
@@ -515,6 +522,17 @@ PADDLE_CAPI_EXPORT extern void PD_ConfigEnableONEDNN(
 /// \param[in] pd_config config
 /// \param[in] capacity The cache capacity.
 ///
+PADDLE_CAPI_EXPORT extern void PD_ConfigSetMkldnnCacheCapacity(
+    __pd_keep PD_Config* pd_config, int32_t capacity);  // deprecated
+///
+/// \brief Set the cache capacity of different input shapes for OneDNN.
+/// Default value 0 means not caching any shape.
+/// Please see MKL-DNN Data Caching Design Document:
+/// https://github.com/PaddlePaddle/FluidDoc/blob/develop/doc/fluid/design/mkldnn/caching/caching.md
+///
+/// \param[in] pd_config config
+/// \param[in] capacity The cache capacity.
+///
 PADDLE_CAPI_EXPORT extern void PD_ConfigSetOnednnCacheCapacity(
     __pd_keep PD_Config* pd_config, int32_t capacity);
 ///
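To round things out, a minimal usage sketch of the capi_exp pairing, written as C++ calling the C API. It is an illustration under stated assumptions: the helper name ConfigureOneDnn is invented, PD_ConfigCreate and PD_ConfigDestroy are assumed from the same header, and a build with oneDNN is assumed; the deprecated calls (commented out) log ONEDNN_UPDATE_WARNING and forward to the ONEDNN variants, as shown in pd_config.cc above.

    #include "paddle/fluid/inference/capi_exp/pd_config.h"

    void ConfigureOneDnn() {
      PD_Config* config = PD_ConfigCreate();

      // Preferred spellings after this change.
      PD_ConfigEnableONEDNN(config);
      PD_ConfigSetOnednnCacheCapacity(config, 10);

      // Deprecated spellings re-added by this commit; they warn and forward.
      // PD_ConfigEnableMKLDNN(config);
      // PD_ConfigSetMkldnnCacheCapacity(config, 10);

      PD_ConfigDestroy(config);
    }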
