forked from shogun-toolbox/shogun
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
shogun-toolbox#3555 add meta_example for classifier_perceptron (undocumented-python-example)
- Loading branch information
Showing
1 changed file
with
35 additions
and
0 deletions.
There are no files selected for viewing
35 changes: 35 additions & 0 deletions
35
examples/meta/src/neural_nets/classification_perceptron.sg
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,35 @@ | ||
# Meta example: train a Perceptron binary classifier on 2D linearly
# separable data, predict on a held-out set, inspect the learned
# hyperplane, and measure classification accuracy.
CSVFile train_feats_file("../../data/classifier_binary_2d_linear_features_train.dat")
CSVFile test_feats_file("../../data/classifier_binary_2d_linear_features_test.dat")
CSVFile train_labels_file("../../data/classifier_binary_2d_linear_labels_train.dat")
CSVFile test_labels_file("../../data/classifier_binary_2d_linear_labels_test.dat")

#![create_features]
RealFeatures feats_train(train_feats_file)
RealFeatures feats_test(test_feats_file)
BinaryLabels lab_train(train_labels_file)
BinaryLabels lab_test(test_labels_file)
#![create_features]

#![create_instance]
real learning_rate=1.0
int max_iterations=1000
Perceptron clf(feats_train, lab_train)
clf.set_learn_rate(learning_rate)
clf.set_max_iter(max_iterations)
#![create_instance]

#![train_and_apply]
clf.train()
clf.set_features(feats_test)
Labels predictions = clf.apply()
#![train_and_apply]

#![extract_weights]
RealVector w = clf.get_w()
real b = clf.get_bias()
#![extract_weights]

#![evaluate_accuracy]
AccuracyMeasure acc()
real accuracy = acc.evaluate(predictions, lab_test)
#![evaluate_accuracy]