Closed · AuroraRAS closed this 7 years ago
I learned how to use it:
#include <cstdlib>   // rand()
#include <iostream>
#include "deepcl/DeepCL.h"
#include "deepcl/batch/BatchData.h"   // ExpectedData / LabeledData
int main(int argc, char *argv[])
{
    int numExamples = 4;
    float data[] = { -1, -1,
                     -1,  1,
                      1, -1,
                      1,  1 };

    EasyCL *cl;
    NeuralNet *net;
    Trainer *trainer;

    /*
     * ExpectedData test case
     */
    cl = new EasyCL();
    net = new NeuralNet(cl);
    net->addLayer( InputLayerMaker::instance()->numPlanes(2)->imageSize(1) );
    net->addLayer( ConvolutionalMaker::instance()->numFilters(4)->filterSize(1)->padZeros()->biased(1) );
    net->addLayer( ActivationMaker::instance()->relu() );
    // one output plane, so the output size matches the single expected value passed to trainNet below
    net->addLayer( FullyConnectedMaker::instance()->numPlanes(1)->imageSize(1) );
    net->addLayer( ActivationMaker::instance()->sigmoid() );
    // net->addLayer( SoftMaxMaker::instance() );
    // SquareLoss gives better results than SoftMax when training from ExpectedData here
    net->addLayer( SquareLossMaker::instance() );

    // a fairly large learning rate
    trainer = SGD::instance( cl, 0.2f, 0.0f );
    // ExpectedData training loop
    // quite a few iterations seem to be necessary
    net->setBatchSize(1);
    for (int var = 0; var < 10000; ++var) {
        TrainingContext tc(var, 0);
        int a = rand() % 2;
        int b = rand() % 2;
        float input[2];
        input[0] = a ? 1 : -1;
        input[1] = b ? 1 : -1;
        float expectedOutput = a ^ b;
        // no need to build ExpectedData by hand; it is handled inside Trainer
        // ExpectedData expectedData(1, &expectedOutput);
        trainer->trainNet(net, &tc, input, &expectedOutput);
        if (var % 100 == 0)
            std::cout << var << "..." << std::endl;
    }
    std::cout << std::endl;
    std::cout << "ExpectedData test:" << std::endl;
    for (int var1 = 0; var1 < numExamples; ++var1) {
        net->forward(&(data[var1 * 2]));
        const float *output = net->getOutput();
        std::cout << data[var1 * 2] << " xor " << data[(var1 * 2) + 1]
                  << " = " << *output << std::endl;
    }
    delete trainer;
    delete net;
    delete cl;
    return 0;
}
Cool. PR for docs/examples welcome :)
I'm trying to understand the ExpectedData class in training.
The LabeledData and ExpectedData classes take the same parameters and are used the same way during training.
The output from LabeledData looks good, but the result from ExpectedData is strange.
This is my XOR case:
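For comparison, here is a minimal sketch of the labels-based path for the same XOR problem. It assumes Trainer exposes a trainNetFromLabels(net, context, input, labels) call as the LabeledData counterpart of the trainNet(net, context, input, expectedOutput) overload used in the example above; check Trainer.h in your DeepCL version before relying on it.

#include <cstdlib>
#include <iostream>
#include "deepcl/DeepCL.h"

int main()
{
    EasyCL *cl = new EasyCL();
    NeuralNet *net = new NeuralNet(cl);

    // same XOR inputs as above, but the target is an integer class label (0 or 1)
    net->addLayer( InputLayerMaker::instance()->numPlanes(2)->imageSize(1) );
    net->addLayer( FullyConnectedMaker::instance()->numPlanes(4)->imageSize(1)->biased() );
    net->addLayer( ActivationMaker::instance()->relu() );
    net->addLayer( FullyConnectedMaker::instance()->numPlanes(2)->imageSize(1)->biased() );
    net->addLayer( SoftMaxMaker::instance() );   // softmax loss for class labels

    Trainer *trainer = SGD::instance( cl, 0.1f, 0.0f );
    net->setBatchSize(1);

    for (int it = 0; it < 10000; ++it) {
        TrainingContext tc(it, 0);
        int a = rand() % 2;
        int b = rand() % 2;
        float input[2] = { a ? 1.0f : -1.0f, b ? 1.0f : -1.0f };
        int label = a ^ b;   // LabeledData path: one int class index per example
        // trainNetFromLabels is assumed to be the labels-based counterpart of trainNet
        trainer->trainNetFromLabels(net, &tc, input, &label);
    }

    // check the four XOR combinations; the output has two planes, one per class
    std::cout << "LabeledData test:" << std::endl;
    float data[] = { -1, -1,  -1, 1,  1, -1,  1, 1 };
    for (int i = 0; i < 4; ++i) {
        net->forward(&data[i * 2]);
        const float *output = net->getOutput();
        std::cout << data[i * 2] << " xor " << data[i * 2 + 1]
                  << " -> P(1) = " << output[1] << std::endl;
    }

    delete trainer;
    delete net;
    delete cl;
    return 0;
}

With the labels path the net needs one output plane per class and a SoftMax loss layer, whereas the expected-values path above compares the float outputs directly against the supplied targets, which is why SquareLoss fits it better.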