嗯……不怎么难。
但是,不如用 Python。
能用 GPU 跑的就别用 CPU 跑,CPU 实在太慢了。
C代码:
#include <stdio.h>
#include <stdlib.h>
double LearningRate = 0.0001; /* gradient-descent step size */
//y = ax + b
double a = 0, b = 0; /* model parameters (slope a, intercept b), start at zero */
double x[100]; /* training inputs, filled by ReadData() */
double y[100]; /* training targets, filled by ReadData() */
/*
 * ReadData: read 100 (x, y) training pairs from stdin into the
 * global x[] and y[] arrays. Expected input: "x y" per pair,
 * whitespace-separated.
 */
void ReadData(){
    printf("Begin ReadData: Enter x, y with x (space)y\n");
    int i;
    for(i = 0 ; i < 100 ; ++i){
        /* FIX: check the scanf return value; on malformed input the
         * original left x[i]/y[i] uninitialized, and reading them
         * later in GetLoss/Train would be undefined behavior. */
        if(scanf("%lf %lf", x + i, y + i) != 2){
            x[i] = 0;
            y[i] = 0;
        }
    }
    printf("Read Successfully!");
}
/*
 * GetLoss: mean squared error of the current model y = a*x + b
 * over the 100 stored samples.
 *
 * Returns the average of (y[j] - prediction)^2.
 */
double GetLoss(){
    int j;
    double loss = 0;
    for(j = 0 ; j < 100 ; ++j){
        /* BUG FIX: the prediction must stay in double precision;
         * the original `int out` truncated a*x[j]+b toward zero,
         * so the reported loss was computed against a wrong model. */
        double out = a * x[j] + b;
        loss += (y[j] - out) * (y[j] - out);
    }
    loss = loss / 100;
    return loss;
}
/*
 * Train: fit y = a*x + b with full-batch gradient descent for
 * 100000 epochs over the 100 stored samples, updating the global
 * parameters a and b in place. Logs loss each epoch.
 */
void Train(){
    double Gradient_a = 0;
    double Gradient_b = 0;
    int i;
    printf("Begin Loss:%lf\n",GetLoss());
    for(i = 0 ; i < 100000 ; ++i){
        printf("Train the %d epoch ", i + 1);
        int j;
        Gradient_a = 0;
        Gradient_b = 0;
        for(j = 0 ; j < 100 ; ++j){
            /* BUG FIX: keep the prediction in double; the original
             * `int out` truncated the forward pass and fed wrong
             * residuals into both gradients. */
            double out = a * x[j] + b;
            /* d(MSE)/db and d(MSE)/da, averaged over the batch of 100 */
            Gradient_b += (2 / 100.) * (out - y[j]);
            Gradient_a += (2 / 100.) * x[j] * (out - y[j]);
        }
        /* NOTE: removed the original stray `printf("%lf", GetLoss())`
         * here — it was an unlabeled debug leftover that ran into the
         * next line's output and duplicated the GetLoss call below. */
        a = a - LearningRate * Gradient_a;
        b = b - LearningRate * Gradient_b;
        printf("a: %lf ,b: %lf ,loss: %lf \n",a ,b ,GetLoss());
    }
}
/* Program entry point: load the 100 training samples from stdin,
 * then fit the linear model with gradient descent. */
int main(void)
{
    ReadData();
    Train();
    return 0;
}
2020/8/3
算是半弃坑了吧,没多大兴趣了。炼丹耗费心神。如果有不懂的可以联系 w202007308@protonmail.com
或简信联系……