// 感知器的实现 C++ (Perceptron implementation in C++)

#include<iostream>
#include<stdlib.h>
#include<math.h>
using namespace std;
#define innode 2
#define outnode 1
#define sample 4
// Single-layer perceptron with `innode` inputs and `outnode` hard-limit
// output units, trained with the classic perceptron learning rule.
class perceptron
{
public:
    perceptron();
    ~perceptron();
    // Weight matrix: w[k][j] connects input j to output node k.
    double w[outnode][innode];
    // Bias term for each output node.
    double b[outnode];
    // Randomly initialize all weights and biases (call before train()).
    void init();
    // Run one training epoch over all `sample` input/target pairs.
    void train(double p[sample][innode], double t[sample][outnode]);
    // Forward one input vector (length innode) and print each node's output.
    void pattern(double *p);
    // Uniform random double in [low, high].
    double randval(double low, double high);
    // Fill the first n entries of w with small random values.
    void initval(double w[], int n);
    // Step activation: 1 if val >= 0, else 0.
    int hardlim(double val);
    // Learning rate for weight/bias updates.
    double lr;
    // Accumulated training error, checked by main() as a stopping criterion.
    double error;
};
// Hard-limit (step) activation: returns 1 for val >= 0, otherwise 0.
int perceptron::hardlim(double val)
{
    return (val >= 0.0) ? 1 : 0;
}
// Returns a uniform random double in [low, high].
// BUG FIX: the original ADDED the range width (`+ (high - low)`), which
// shifted the rand() fraction into roughly [0.01, 1.01] for (-0.01, 0.01)
// instead of scaling it; the fraction must be MULTIPLIED by (high - low).
double perceptron::randval(double low, double high)
{
    return ((double)rand() / (double)RAND_MAX) * (high - low) + low;
}
// Fills the first n entries of the array with small random values in
// [-0.01, 0.01], giving training a near-zero symmetric starting point.
void perceptron::initval(double w[], int n)
{
    int i = 0;
    while (i < n)
    {
        w[i] = randval(-0.01, 0.01);
        ++i;
    }
}
// Seeds every weight and every bias with a small random value.
void perceptron::init()
{
    // Treat the 2-D weight matrix as one flat array of outnode*innode doubles.
    initval(&w[0][0], outnode * innode);
    initval(b, outnode);
}
// Runs one training epoch over all `sample` pairs using the perceptron rule:
//   w[k][j] += lr * (target - output) * input[j];   b[k] += lr * (target - output)
// p: sample-by-innode inputs, t: sample-by-outnode targets.
//
// BUG FIXES vs. the original:
//  * `error` is reset at the start of each epoch. It used to accumulate
//    across every train() call forever, so main()'s convergence test never
//    reflected the current epoch.
//  * The ABSOLUTE error is accumulated: signed errors of +1 and -1 would
//    otherwise cancel and fake convergence.
// Output nodes are independent (no cross terms), so computing and updating
// each node in turn is equivalent to the original compute-all-then-update.
void perceptron::train(double p[sample][innode], double t[sample][outnode])
{
    error = 0.0;
    for (int i = 0; i < sample; i++)
    {
        for (int k = 0; k < outnode; k++)
        {
            // Weighted sum plus bias for output node k.
            double net = b[k];
            for (int j = 0; j < innode; j++)
            {
                net += w[k][j] * p[i][j];
            }
            double err = t[i][k] - hardlim(net);
            // Perceptron learning rule update for this node.
            for (int j = 0; j < innode; j++)
            {
                w[k][j] += lr * err * p[i][j];
            }
            b[k] += lr * err;
            error += fabs(err);
        }
    }
}
// Feeds a single input vector p (length innode) forward through the trained
// weights and prints each node's hard-limited output, one per line.
void perceptron::pattern(double* p)
{
    for (int k = 0; k < outnode; k++)
    {
        // Weighted sum plus bias, then step activation.
        double net = b[k];
        for (int j = 0; j < innode; j++)
        {
            net += w[k][j] * p[j];
        }
        cout << hardlim(net) << endl;
    }
}

// Sets the training hyperparameters; weights are left for init() to seed.
perceptron::perceptron()
{
    lr = 0.001;   // learning rate for the perceptron rule updates
    error = 0.8;  // non-zero sentinel so main()'s while loop runs at least once
}
perceptron::~perceptron()
{
    // Nothing to release: the class owns no dynamic resources.
}


// Training inputs: the four rows of the logical-AND truth table.
double X[sample][innode] = {
    { 0,0 },
    { 0,1 },
    { 1,0 },
    { 1,1 }
};
// Target outputs: AND is 1 only when both inputs are 1.
double Y[sample][outnode] =
{
    { 0 },
    { 0 },
    { 0 },
    { 1 }
};
// Trains the perceptron on the AND truth table until the epoch error is
// (near) zero or 10000 epochs elapse, then classifies the input {1,1}.
int main()
{
    perceptron Perceptron;
    Perceptron.init();
    int times = 0;
    // BUG FIX: use fabs, not abs. With <stdlib.h> in scope, `abs` can
    // resolve to int abs(int), truncating the double error toward zero
    // and ending training immediately.
    while (fabs(Perceptron.error) > 0.001 && times < 10000)
    {
        times++;
        // Reset the accumulated error so the loop condition tests only the
        // error of the epoch about to run, not residue from earlier epochs.
        Perceptron.error = 0.0;
        Perceptron.train(X, Y);
    }
    double m[innode] = { 1,1 };
    Perceptron.pattern(m);  // expected output for AND(1,1): 1
    return 0;
}

原文链接: https://www.cnblogs.com/semen/p/6861826.html

欢迎关注

微信关注下方公众号,第一时间获取干货硬货;公众号内回复【pdf】免费获取数百本计算机经典书籍

原创文章受到原创版权保护。转载请注明出处:https://www.ccppcoding.com/archives/254002

非原创文章文中已经注明原地址,如有侵权,联系删除

关注公众号【高性能架构探索】,第一时间获取最新文章

转载文章受原作者版权保护。转载请注明原作者出处!

(0)
上一篇 2023年2月14日 上午7:28
下一篇 2023年2月14日 上午7:29

相关推荐