Implementing the AdaBoost algorithm with NumPy
Author: Internet
import numpy as np
import matplotlib.pyplot as plt

# A weak learner: a one-feature decision stump.
# "feature `index` <= `value`" predicts `clas`, otherwise the opposite sign.
class node:
    def __init__(self, index, value, clas):
        self.index = index
        self.value = value
        self.clas = clas
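# For illustration only (hypothetical values, not used below): a stump that
# predicts +1 whenever feature 2 is <= 10.0 and -1 otherwise.
example_stump = node(index=2, value=10.0, clas=1)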
# Weighted 0-1 loss of a threshold stump: split feature `index` at the value of
# sample `th`, try both polarities (+1 on the left / -1 on the left) and return
# the smaller weighted error together with the stump parameters.
def cpuloss(th, index):
    sum = 0      # error when predicting +1 for x[index] <= threshold
    sum1 = 0     # error for the flipped polarity
    for i in range(len(train)):
        if train[i, index] <= train[th, index]:
            if train_lable[i] != 1:
                sum = sum + w1[i]
        else:
            if train_lable[i] != -1:
                sum = sum + w1[i]
    for i in range(len(train)):
        if train[i, index] <= train[th, index]:
            if train_lable[i] != -1:
                sum1 = sum1 + w1[i]
        else:
            if train_lable[i] != 1:
                sum1 = sum1 + w1[i]
    if sum > sum1:
        return sum1, index, train[th, index], -1
    else:
        return sum, index, train[th, index], 1
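# A vectorized sketch of the same computation (an alternative to cpuloss, not
# called anywhere below; the name cpuloss_vec is ours, and it relies on the
# same globals train, train_lable and w1 defined later in the script):
def cpuloss_vec(th, index):
    pred = np.where(train[:, index] <= train[th, index], 1, -1)  # +1 on the left branch
    labels = train_lable[:, 0]
    err_pos = np.sum(w1[pred != labels])   # weighted error with +1 on the left
    err_neg = np.sum(w1[pred == labels])   # weighted error with the polarity flipped
    if err_pos <= err_neg:
        return err_pos, index, train[th, index], 1
    return err_neg, index, train[th, index], -1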
# Exhaustively search all 4 features and all sample values as candidate thresholds
# and return the stump with the smallest weighted error under the current weights w1.
def create():
    min = 1
    for i in range(4):
        for j in range(len(train)):
            k, k1, k2, k3 = cpuloss(j, i)
            if k < min:
                min = k
                index = k1
                value = k2
                cla = k3
    no = node(index, value, cla)
    return no
# Weighted error of a fitted stump on the training set (the epsilon of AdaBoost).
def comerror(node):
    e = 0
    for i in range(len(train)):
        if train[i, node.index] <= node.value:
            if train_lable[i] != node.clas:
                e = e + w1[i]
        else:
            if node.clas == 1:
                if train_lable[i] != -1:
                    e = e + w1[i]
            else:
                if train_lable[i] != 1:
                    e = e + w1[i]
    return e
# Prediction of a stump on training sample `th`.
def predict(th, node):
    if train[th, node.index] <= node.value:
        return node.clas
    else:
        if node.clas == 1:
            return -1
        else:
            return 1

# Prediction of a stump on test sample `th`.
def predict1(th, node):
    if test[th, node.index] <= node.value:
        return node.clas
    else:
        if node.clas == 1:
            return -1
        else:
            return 1
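# predict and predict1 differ only in which array they index; a single helper
# taking the data matrix as an argument would cover both cases (a sketch, not
# used below; predict_on is our name):
def predict_on(data, th, node):
    if data[th, node.index] <= node.value:
        return node.clas
    return -node.clas   # clas is +1 or -1, so the other branch is just the opposite sign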
# Generate a synthetic, linearly separable data set: 300 training and 300 test
# samples with 4 integer features in [-300, 300), labelled by the sign of
# 1*x0 + 2*x1 + 3*x2 + 4*x3.
w = 300
train = np.random.randint(-300, 300, (w, 4))
train = train.astype(float)
train_lable = np.zeros((w, 1))
test = np.random.randint(-300, 300, (w, 4))
test = test.astype(float)
test_lable = np.zeros((w, 1))
for i in range(w):
    if 1 * train[i, 0] + 2 * train[i, 1] + 3 * train[i, 2] + 4 * train[i, 3] > 0:
        train_lable[i] = 1
    else:
        train_lable[i] = -1
    if 1 * test[i, 0] + 2 * test[i, 1] + 3 * test[i, 2] + 4 * test[i, 3] > 0:
        test_lable[i] = 1
    else:
        test_lable[i] = -1
# Sanity check: no label should remain 0 after the assignment above.
for i in range(w):
    if train_lable[i] == 0:
        print(1)
    if test_lable[i] == 0:
        print(1)
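# The same labelling rule written with NumPy broadcasting (a sketch; the names
# true_weights and *_lable_vec are ours and are not used below):
true_weights = np.array([1.0, 2.0, 3.0, 4.0])
train_lable_vec = np.where(train @ true_weights > 0, 1, -1).reshape(-1, 1)
test_lable_vec = np.where(test @ true_weights > 0, 1, -1).reshape(-1, 1)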
tree1 = []                    # the weak learners (stumps) picked so far
a = []                        # their weights alpha = 0.5 * ln((1 - e) / e)
w1 = np.zeros(len(train))
w1 = w1 + 1 / len(train)      # initial sample weights: uniform 1/N
loss = 1
while 1:
    if loss == 0:
        break
    # Fit the best stump under the current weights and compute its weighted error.
    no = create()
    tree1.append(no)
    e = comerror(no)
    a.append(np.log((1 - e) / e) / 2)
    # Re-weight the samples: w_i <- w_i * exp(-alpha * y_i * h(x_i)) / Z,
    # where Z normalises the weights to sum to 1.
    z = 0
    for i in range(len(train)):
        z = z + w1[i] * np.exp(-a[len(a) - 1] * train_lable[i] * predict(i, tree1[len(tree1) - 1]))
    for i in range(len(train)):
        w1[i] = w1[i] * np.exp(-a[len(a) - 1] * train_lable[i] * predict(i, tree1[len(tree1) - 1])) / z
    # Count the training errors of the current ensemble sign(sum_m alpha_m * h_m(x));
    # stop once the training set is classified perfectly.
    loss = 0
    for i in range(len(train)):
        sum = 0
        for j in range(len(tree1)):
            sum = sum + a[j] * predict(i, tree1[j])
        if sum > 0 and train_lable[i] != 1:
            loss += 1
        elif sum < 0 and train_lable[i] != -1:
            loss += 1
    print(loss)
# Accuracy of the final ensemble on the test set.
acc = 0
for i in range(len(test)):
    sum = 0
    for j in range(len(tree1)):
        sum = sum + a[j] * predict1(i, tree1[j])
    if sum > 0 and test_lable[i] == 1:
        acc += 1
    elif sum < 0 and test_lable[i] == -1:
        acc += 1
print(acc / len(test))
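For comparison, scikit-learn ships the same stump-based AdaBoost. A minimal sketch reusing the arrays generated above, assuming scikit-learn is installed (the variable clf is our name):

from sklearn.ensemble import AdaBoostClassifier
from sklearn.tree import DecisionTreeClassifier

# Depth-1 trees are exactly the decision stumps used above.
clf = AdaBoostClassifier(DecisionTreeClassifier(max_depth=1), n_estimators=50)
clf.fit(train, train_lable.ravel())
print(clf.score(test, test_lable.ravel()))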
Source: https://www.cnblogs.com/hahaah/p/15171543.html