Skip to content

Commit

Permalink
Move the code into the optimizer library; the regularizer still needs to be filled in.
Browse files Browse the repository at this point in the history
  • Loading branch information
dzhwinter committed May 19, 2017
1 parent 0a03b9d commit 92a5d16
Show file tree
Hide file tree
Showing 14 changed files with 43 additions and 20 deletions.
File renamed without changes.
File renamed without changes.
10 changes: 5 additions & 5 deletions paddle/lib/optimizer.cc → paddle/optimizer/optimizer.cc
Original file line number Diff line number Diff line change
Expand Up @@ -2,15 +2,15 @@
#include "optimizer_private.h"

/*! \brief Create an SGD optimizer inside the given C-API optimizer handle.
 *  \param optimizer     non-null handle whose C++ impl will be configured
 *  \param learning_rate step size forwarded to the underlying SGD optimizer
 *  \return PADDLE_SUCCESS on success
 */
int32_t paddle_create_SGDOptimizer(paddle_optimizer* optimizer, double learning_rate) {
  // Delegate to the C++ implementation held by the handle.
  optimizer->impl->create_SGDOptimizer(learning_rate);
  return PADDLE_SUCCESS;
}

/*! \brief Release the resources owned by an optimizer handle.
 *  \param optimizer handle to release; may be null
 *  \return PADDLE_SUCCESS always (releasing a null handle is a no-op, not an error)
 */
int32_t paddle_release_optimizer(paddle_optimizer* optimizer) {
  // Null handle: nothing to release.
  if (optimizer == nullptr)
    return PADDLE_SUCCESS;
  // NOTE(review): "destory" mirrors the (misspelled) method name on the C++
  // impl class — confirm against parameter_optimizer.h before renaming.
  optimizer->impl->destory();
  return PADDLE_SUCCESS;
}

int32_t paddle_update_parameter(paddle_optimizer* optimizer, parameter* param, const gradient* grad,
Expand All @@ -19,5 +19,5 @@ int32_t paddle_update_parameter(paddle_optimizer* optimizer, parameter* param, c
Tensor<datatype> Gradient(grad, num_bytes);
/*! \brief real update hook */
optimizer->impl->update(Parameter, Gradient, learning_rate);
return LIB_SUCCESS;
return PADDLE_SUCCESS;
}
5 changes: 2 additions & 3 deletions paddle/lib/optimizer.h → paddle/optimizer/optimizer.h
Original file line number Diff line number Diff line change
Expand Up @@ -28,9 +28,8 @@ extern "C" {
} paddle_element_type;

/*! \brief Status codes returned by every C-API entry point.
 *  0 signals success; -1 signals failure, following the common C convention.
 */
const int32_t PADDLE_SUCCESS = 0;
const int32_t PADDLE_ERROR = -1;


typedef void* parameter;
Expand Down
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -8,5 +8,5 @@

/*! \brief Opaque C handle wrapping the C++ optimizer implementation. */
struct paddle_optimizer {
  /*! \brief optimizer implementation on the C++ side; owned by this handle */
  // NOTE(review): the commit spells the type "ParameterOptimzier", but the
  // class is declared as ParameterOptimizer in parameter_optimizer.h —
  // using the declared spelling here; confirm.
  paddle::optimizer::ParameterOptimizer *impl;
};
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
#include "training_ops.h"

namespace paddle {

template<class T>
class OpitmizerTests {

private:
Expand All @@ -10,4 +13,4 @@ class OpitmizerTests {
void applyGradientDescent_TEST() {

}

}
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
#include "Tensor.h"

namespace paddle {
namespace lib {
namespace optimizer {

class ParameterOptimizer {
public:
Expand Down
9 changes: 7 additions & 2 deletions paddle/lib/regularizer.h → paddle/optimizer/regularizer.h
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
#ifndef __PADDLE_LIB_REGULARIZER_H
#define __PADDLE_LIB_REGULARIZER_H
#ifndef PADDLE_LIB_REGULARIZER_H_
#define PADDLE_LIB_REGULARIZER_H_

namespace paddle {
namespace optimizer {


/*! \brief regularizer for L1, L2 */
Expand Down Expand Up @@ -31,5 +34,7 @@ class L1LrRegularizer : public Regularizer {

};

}
}

#endif
Original file line number Diff line number Diff line change
@@ -1,5 +1,9 @@
#ifndef PADDLE_LIB_REGULARIZER_OPS_H
#define PADDLE_LIB_REGULARIZER_OPS_H
#ifndef PADDLE_LIB_REGULARIZER_OPS_H_
#define PADDLE_LIB_REGULARIZER_OPS_H_

namespace paddle {
namespace optimizer {


/*! \brief L1 implement */
template<class T>
Expand All @@ -11,4 +15,7 @@ void applyL1(Tensor<T> &parameter,

}

}
}

#endif
10 changes: 7 additions & 3 deletions paddle/lib/sgd_optimizer.h → paddle/optimizer/sgd_optimizer.h
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,11 @@
#include "parameter_optimizer.h"
#include "training_ops.h"

namespace paddle {
namespace optimizer {

class SGDOptimizer : public ParameterOptimizer {
public:
public:
/*! \brief call the applySGD for example */
void update(Tensor<T> &parameter,
const Tensor<T> &gradient,
Expand All @@ -27,7 +30,8 @@ class MomentumOptimizer : public ParameterOptimizer {
applyMomentum(parameter, gradient, momentum, learning_rate, mu, weight_decay);
}

}

};

}
}
#endif
Original file line number Diff line number Diff line change
@@ -1,8 +1,11 @@
#include "training_ops.h"

namespace paddle {
namespace optimizer {

/*! \brief implement different update method
e.g. applyGradientDescentAvx
*/
e.g. applyGradientDescentAvx
*/

template<typename T>
void applyGradientDescent(Tensor<T> &parameter,
Expand All @@ -28,3 +31,5 @@ void applyMomentum(Tensor<T> &parameter,
parameter[i] += momentum[i];
}
}
}
}
File renamed without changes.
File renamed without changes.

0 comments on commit 92a5d16

Please sign in to comment.