forked from tiny-dnn/tiny-dnn
Commit 9b3e0eb (initial commit, 0 parents)
Showing 13 changed files with 704 additions and 0 deletions.
.gitattributes
@@ -0,0 +1,22 @@
# Auto detect text files and perform LF normalization
* text=auto

# Custom for Visual Studio
*.cs diff=csharp
*.sln merge=union
*.csproj merge=union
*.vbproj merge=union
*.fsproj merge=union
*.dbproj merge=union

# Standard to msysgit
*.doc diff=astextplain
*.DOC diff=astextplain
*.docx diff=astextplain
*.DOCX diff=astextplain
*.dot diff=astextplain
*.DOT diff=astextplain
*.pdf diff=astextplain
*.PDF diff=astextplain
*.rtf diff=astextplain
*.RTF diff=astextplain
.gitignore
@@ -0,0 +1,163 @@
#################
## Eclipse
#################

*.pydevproject
.project
.metadata
bin/
tmp/
*.tmp
*.bak
*.swp
*~.nib
local.properties
.classpath
.settings/
.loadpath

# External tool builders
.externalToolBuilders/

# Locally stored "Eclipse launch configurations"
*.launch

# CDT-specific
.cproject

# PDT-specific
.buildpath


#################
## Visual Studio
#################

## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.

# User-specific files
*.suo
*.user
*.sln.docstates

# Build results
[Dd]ebug/
[Rr]elease/
*_i.c
*_p.c
*.ilk
*.meta
*.obj
*.pch
*.pdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.vspscc
.builds
*.dotCover

## TODO: If you have NuGet Package Restore enabled, uncomment this
#packages/

# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opensdf
*.sdf

# Visual Studio profiler
*.psess
*.vsp

# ReSharper is a .NET coding add-in
_ReSharper*

# Installshield output folder
[Ee]xpress

# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html

# Click-Once directory
publish

# Others
[Bb]in
[Oo]bj
sql
TestResults
*.Cache
ClientBin
stylecop.*
~$*
*.dbmdl
Generated_Code #added for RIA/Silverlight projects

# Backup & report files from converting an old project file to a newer
# Visual Studio version. Backup files are not needed, because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML


############
## Windows
############

# Windows image file caches
Thumbs.db

# Folder config file
Desktop.ini


#############
## Python
#############

*.py[co]

# Packages
*.egg
*.egg-info
dist
build
eggs
parts
bin
var
sdist
develop-eggs
.installed.cfg

# Installer logs
pip-log.txt

# Unit test / coverage reports
.coverage
.tox

# Translations
*.mo

# Mr Developer
.mr.developer.cfg

# Mac crap
.DS_Store
activation.h
@@ -0,0 +1,19 @@
#pragma once
#include <cmath> // std::exp

namespace nn {

struct sigmoid_activation {
    static double f(double x) { return 1.0 / (1.0 + std::exp(-x)); }
    static double df(double f_x) { return f_x * (1.0 - f_x); }
};

struct tanh_activation {
    static double f(double x) {
        const double ep = std::exp(x);
        const double em = std::exp(-x);
        return (ep - em) / (ep + em);
    }
    // tanh'(x) = 1 - tanh(x)^2, expressed in terms of the output f(x)
    static double df(double f_x) { return 1.0 - f_x * f_x; }
};

} // namespace nn
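
Note that both structs take the already-computed output f(x) as the argument to df, rather than x itself, so backpropagation can reuse the forward-pass result instead of re-evaluating the activation. A standalone sketch (not part of this commit) that validates each df against a central-difference estimate of f':

// Standalone sketch, not part of the commit: checks that df(f(x)) agrees
// with a numeric central-difference derivative of f for both structs.
#include <cmath>
#include <cstdio>
#include "activation.h"

template <typename Activation>
bool derivative_ok(double x, double eps = 1e-6, double tol = 1e-6) {
    const double analytic = Activation::df(Activation::f(x));
    const double numeric  =
        (Activation::f(x + eps) - Activation::f(x - eps)) / (2.0 * eps);
    return std::fabs(analytic - numeric) < tol;
}

int main() {
    for (double x = -2.0; x <= 2.0; x += 0.5) {
        std::printf("x=%+.1f  sigmoid ok=%d  tanh ok=%d\n", x,
                    (int)derivative_ok<nn::sigmoid_activation>(x),
                    (int)derivative_ok<nn::tanh_activation>(x));
    }
    return 0;
}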
cnn.h
@@ -0,0 +1,121 @@
#pragma once
#include <stdexcept>
#include <algorithm>
#include <iterator>

#include "util.h"
#include "activation.h"
#include "learner.h"
#include "layer.h"

namespace nn {

class cnn {
public:
    cnn(double alpha, double lambda) : lambda_(lambda) {
        learner_ = new gradient_descent(alpha);
    }

    cnn(learner *l, double lambda) : lambda_(lambda), learner_(l) {}

    ~cnn() {
        delete learner_;
    }

    void add(layer *layer) {
        layers_.add(layer);
        //layer->unroll(&params_, &diffs_);
    }

    int in_dim() const { return layers_.head()->in_dim(); }
    int out_dim() const { return layers_.tail()->out_dim(); }

    void predict(const vec_t& in, vec_t *out) {
        *out = *layers_.head()->forward_propagation(in);
    }

    void train(const std::vector<vec_t>& in, const std::vector<vec_t>& training) {
        calc_diff(in, training);
        // update by delta and learning algorithm
        learner_->update(layers_.all_param(), layers_.all_diff());
    }

    bool check(const std::vector<vec_t>& in, const std::vector<vec_t>& training) {
        const int dim = layers_.all_param().size();
        vec_t diff1(dim), diff2(dim);

        calc_diff(in, training);
        for (int i = 0; i < dim; i++)
            diff1[i] = *layers_.all_diff()[i];

        calc_diff_numeric(in, training);
        for (int i = 0; i < dim; i++)
            diff2[i] = *layers_.all_diff()[i];

        float_t diff = sum_square(diff1 - diff2) / dim;
        return diff < 1E-5;
    }

    float_t loss_function(const std::vector<vec_t>& in, const std::vector<vec_t>& training) {
        const int m = in.size();
        float_t loss_score = 0.0;
        float_t norm_score = 0.0;

        for (int i = 0; i < m; i++) {
            layers_.head()->forward_propagation(in[i]);
            loss_score += sum_square(layers_.tail()->output() - training[i]);
        }
        loss_score /= (2 * m);

        norm_score = lambda_ * sum_square(layers_.weight()) / 2.0; // the bias is not included
        return loss_score + norm_score;
    }

private:
    void calc_diff(const std::vector<vec_t>& in, const std::vector<vec_t>& training) {
        const int m = in.size();
        layers_.reset_diff();

        for (int i = 0; i < m; i++) {
            layers_.head()->forward_propagation(in[i]);
            layers_.tail()->back_propagation(in[i], training[i]);
        }

        pvec_t& w = layers_.weight();
        pvec_t& dw = layers_.weight_diff();
        for (size_t i = 0; i < w.size(); i++)
            *dw[i] = *dw[i] / m + lambda_ * *w[i];

        pvec_t& b = layers_.bias();
        pvec_t& db = layers_.bias_diff();
        for (size_t i = 0; i < b.size(); i++)
            *db[i] = *db[i] / m;
    }

    void calc_diff_numeric(const std::vector<vec_t>& in, const std::vector<vec_t>& training) {
        static const float_t EPSILON = 1e-4;
        layers_.reset_diff();

        const int dim = layers_.all_param().size();

        for (int i = 0; i < dim; i++) {
            const float_t v = *layers_.all_param()[i];

            *layers_.all_param()[i] = v + EPSILON;
            const float_t Jp = loss_function(in, training);

            *layers_.all_param()[i] = v - EPSILON;
            const float_t Jm = loss_function(in, training);

            *layers_.all_param()[i] = v; // restore the original value

            const float_t diff = (Jp - Jm) / (2.0 * EPSILON);
            *layers_.all_diff()[i] = diff;
        }
    }

    const double lambda_; // weight decay
    layers layers_;
    learner *learner_;
};

} // namespace nn
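
For reference, loss_function and calc_diff_numeric together implement a standard central-difference gradient check against the regularized squared-error loss:

$$
J(\theta) = \frac{1}{2m}\sum_{i=1}^{m}\bigl\lVert h(x^{(i)}) - y^{(i)}\bigr\rVert^2 + \frac{\lambda}{2}\sum_j w_j^2,
\qquad
\frac{\partial J}{\partial \theta_k} \approx \frac{J(\theta_k + \epsilon) - J(\theta_k - \epsilon)}{2\epsilon},
\quad \epsilon = 10^{-4}.
$$

check() accepts the analytic gradients from back_propagation when the mean squared deviation between the two estimates falls below 1e-5.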
@@ -0,0 +1,3 @@
#pragma once
#include "util.h"
#include "cnn.h"