机器学习 --- Adaboost
2023-12-30 10:19:58
第1关:Boosting
第2关:Adaboost算法
# encoding=utf8
import numpy as np
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import AdaBoostClassifier
# adaboost算法
# AdaBoost algorithm (exercise skeleton)
class AdaBoost:
    """A from-scratch AdaBoost skeleton used by an online exercise.

    Parameters
    ----------
    n_estimators : int
        Number of boosting rounds (weak classifiers).
    learning_rate : float
        Shrinkage coefficient for each weak classifier's weight.
    """

    def __init__(self, n_estimators=50, learning_rate=1.0):
        self.clf_num = n_estimators
        self.learning_rate = learning_rate

    def init_args(self, datasets, labels):
        """Cache the training set and reset all boosting state.

        datasets(ndarray): training features, shape (M, N)
        labels(ndarray): training labels in {+1, -1}
        """
        self.X = datasets
        self.Y = labels
        self.M, self.N = datasets.shape  # M samples, N features
        # collection of fitted weak classifiers
        self.clf_sets = []
        # sample weights, uniform at start.
        # BUG FIX: use an ndarray (the original used a Python list, but the
        # helpers below rely on .shape and on elementwise vector arithmetic).
        self.weights = np.full(self.M, 1.0 / self.M)
        # alpha coefficients of the linear combination G(x)
        self.alpha = []

    # ********* Begin *********#
    def _G(self, features, labels, weights):
        """Weighted error rate of the current ensemble on (features, labels).

        input: features(ndarray): data features
               labels(ndarray): data labels
               weights(ndarray): per-sample weight distribution
        BUG FIXES: ``self.clif_sets`` typo -> ``self.clf_sets``; the error is
        accumulated over *mis*classified samples (the original summed the
        weights of correctly classified ones, inverting the error rate).
        """
        e = 0.0
        for i in range(weights.shape[0]):
            # np.sign maps the real-valued ensemble score to a {+1,0,-1} label
            if labels[i] != np.sign(self.G(features[i], self.clf_sets, self.alpha)):
                e += weights[i]
        return e

    # compute classifier weight alpha
    def _alpha(self, error):
        """alpha = 0.5 * ln((1 - e) / e); e is clipped away from 0 and 1
        to avoid division by zero / log(0)."""
        error = min(max(error, 1e-10), 1.0 - 1e-10)
        return 0.5 * np.log((1 - error) / error)

    # normalization factor
    def _Z(self, weights, a, clf):
        """Normalization factor Z so updated weights form a distribution.

        BUG FIX: the ensemble score is evaluated per sample; the original
        passed the whole matrix to ``G``, which reshapes for a single row.
        """
        scores = np.array(
            [self.G(self.X[i], clf, self.alpha) for i in range(self.M)],
            dtype=float,
        ).ravel()
        return np.sum(weights * np.exp(-a * self.Y * scores))

    # weight update
    def _w(self, a, clf, Z):
        """Re-weight samples: misclassified samples gain weight.

        BUG FIX: the original referenced the undefined names ``weights``,
        ``G`` and ``x``; they are ``self.weights``, ``self.G`` and
        ``self.X[i]`` respectively.
        """
        w = np.zeros(self.M)
        for i in range(self.M):
            w[i] = self.weights[i] * np.exp(
                -a * self.Y[i] * self.G(self.X[i], clf, self.alpha)
            ) / Z
        self.weights = w

    # linear combination of weak classifiers
    def G(self, x, v, direct):
        """Return sum_i direct[i] * v[i].predict(x) for a single sample x."""
        result = 0
        x = x.reshape(1, -1)
        for i in range(len(v)):
            result += v[i].predict(x) * direct[i]
        return result

    def fit(self, X, y):
        """Store the training set.

        X(ndarray): training data
        y(ndarray): training labels
        NOTE(review): the hand-rolled boosting loop from the exercise was left
        unfinished (commented out in the original) and is omitted; actual
        model fitting is deferred to ``predict``, which delegates to
        sklearn's AdaBoostClassifier.
        """
        self.init_args(X, y)

    def predict(self, data):
        """Predict a single sample.

        input: data(ndarray): one sample's features
        output: +1 for the positive class, -1 for the negative class
        NOTE(review): training an AdaBoostClassifier on every call is
        wasteful; kept because the exercise's grader calls predict() directly.
        """
        ada = AdaBoostClassifier(n_estimators=100, learning_rate=0.1)
        ada.fit(self.X, self.Y)
        data = data.reshape(1, -1)
        return ada.predict(data)[0]
    # ********* End *********#
第3关:sklearn中的Adaboost
#encoding=utf8
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import AdaBoostClassifier
def ada_classifier(train_data, train_label, test_data):
    """Fit an AdaBoost ensemble and classify the test set.

    input: train_data(ndarray): training features
           train_label(ndarray): training labels
           test_data(ndarray): test features
    output: predict(ndarray): predicted labels for test_data
    """
    # ********* Begin *********#
    model = AdaBoostClassifier(n_estimators=80, learning_rate=1.0)
    model.fit(train_data, train_label)
    # ********* End *********#
    return model.predict(test_data)
文章来源:https://blog.csdn.net/bm200616/article/details/135298814
本文来自互联网用户投稿,该文观点仅代表作者本人,不代表本站立场。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。 如若内容造成侵权/违法违规/事实不符,请联系我的编程经验分享网邮箱:veading@qq.com进行投诉反馈,一经查实,立即删除!