main.cpp
#include <iostream>
#include <vector>
#include <cmath>
#include <string>
#include "logistic.h"
#include "importdata.h"
int main() {
    // Load the dataset: 3 features, 400 samples, read from a CSV file.
    Data data(3, 400, "/home/dark/Mywork/Logistic regression/binary2.csv");
    std::vector<int> target = data.gettarget();
    data.featurescaling();
    std::vector<std::vector<double>> donnee = data.getdata();

    // Initialize the weight vector with ones.
    std::vector<double> a(data.getfeature(), 1.0);
    // Initialize the logistic model with the weight vector a and a learning rate of 1.
    logi ac(a, 1);

    // Run gradient descent and get the fitted weights.
    std::vector<double> c = ac.graddescent(donnee, target);

    // Display the fitted weights on the console.
    std::cout << "Weight values:" << std::endl;
    for (int k = 0; k < data.getfeature(); k++) {
        std::cout << c[k] << std::endl;
    }
    // Count correct and missed predictions on the training set,
    // thresholding the predicted probability at 0.5.
    int correct = 0;
    int missed = 0;
    const int n = static_cast<int>(target.size()); // 400 samples
    for (int k = 0; k < n; k++) {
        double p = ac.predict(c, donnee[k]);
        if (p > 0.5) {
            if (target[k] == 1) {
                correct++;
            } else {
                missed++;
            }
        } else if (p < 0.5) {
            if (target[k] == 0) {
                correct++;
            } else {
                missed++;
            }
        }
    }
    // Fraction of training samples whose label was predicted incorrectly.
    double error = static_cast<double>(missed) / n;
    std::cout << "Empirical error: " << error << std::endl;
    return 0;
}
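
The Data and logi classes used above are declared in importdata.h and logistic.h, which are not shown on this page. For readers of main.cpp only, here is a minimal, hypothetical sketch of what logi::predict and logi::graddescent could look like: the class name, constructor arguments, and method signatures are inferred from the calls in main.cpp, while the sigmoid/batch-gradient-descent body and the fixed iteration count are assumptions, not the repository's actual implementation.

#include <cmath>
#include <vector>

// Hypothetical sketch of the logi class, inferred from its use in main.cpp.
// The real implementation lives in logistic.h and may differ.
class logi {
public:
    logi(std::vector<double> weights, double learning_rate)
        : w_(weights), lr_(learning_rate) {}

    // Sigmoid of the dot product between a weight vector and one sample.
    double predict(const std::vector<double>& w,
                   const std::vector<double>& x) const {
        double z = 0.0;
        for (std::size_t j = 0; j < w.size(); ++j) {
            z += w[j] * x[j];
        }
        return 1.0 / (1.0 + std::exp(-z));
    }

    // Batch gradient descent on the logistic (cross-entropy) loss.
    // Runs a fixed number of iterations; the real version may instead
    // use a convergence criterion.
    std::vector<double> graddescent(const std::vector<std::vector<double>>& X,
                                    const std::vector<int>& y,
                                    int iterations = 1000) {
        const std::size_t n = X.size();
        for (int it = 0; it < iterations; ++it) {
            std::vector<double> grad(w_.size(), 0.0);
            // Accumulate the gradient over all samples.
            for (std::size_t i = 0; i < n; ++i) {
                double error = predict(w_, X[i]) - y[i];
                for (std::size_t j = 0; j < w_.size(); ++j) {
                    grad[j] += error * X[i][j];
                }
            }
            // Take one step against the averaged gradient.
            for (std::size_t j = 0; j < w_.size(); ++j) {
                w_[j] -= lr_ * grad[j] / n;
            }
        }
        return w_;
    }

private:
    std::vector<double> w_;
    double lr_;
};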