
Reference: 李航. 统计学习方法 (Statistical Learning Methods) [M]. 北京: 清华大学出版社, 2019: 43-45.
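Brief recap (added for context, following the reference above): in the dual form, the weight vector and bias are accumulated through per-sample coefficients α_i, so training only ever needs the inner products stored in the Gram matrix. The update implemented by the code below is

\[
y_i\Bigl(\sum_{j=1}^{N}\alpha_j y_j\,(x_j \cdot x_i) + b\Bigr) \le 0
\;\Longrightarrow\;
\alpha_i \leftarrow \alpha_i + \eta,\qquad b \leftarrow b + \eta\,y_i,
\]

and once the loop terminates the primal parameters are recovered as

\[
w = \sum_{i=1}^{N}\alpha_i y_i x_i,\qquad b = \sum_{i=1}^{N}\alpha_i y_i .
\]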
import numpy as np


# Dual form of the perceptron algorithm
class Perceptron:
    E: float            # learning rate eta
    A: np.ndarray       # alpha; A[i] = n_i * E, where n_i is the number of updates on sample i
    W: np.ndarray       # weight, W = sum(A[i] * y_i * x_i)
    B: float            # bias, b = sum(A[i] * y_i)
    Gram: np.ndarray    # Gram matrix [x_i . x_j], shape n * n

    def __init__(self, E=1.0):
        self.E = E

    def fit(self, X, Y):
        self.Gram = X.dot(X.T)
        self.A = np.zeros(X.shape[0])
        self.B = 0.0
        flag = True  # True while some sample is still misclassified
        while flag:
            flag = False
            for i in range(X.shape[0]):
                # misclassification condition: y_i * (sum_j A[j]*y_j*(x_j . x_i) + b) <= 0
                condition = (np.multiply(self.A, Y).dot(self.Gram[:, i]) + self.B) * Y[i]
                if condition <= 0:
                    self.A[i] += self.E
                    self.B += self.E * Y[i]
                    flag = True
                    print('A: ', self.A, ' | ', 'B: ', self.B)
        # recover the primal weight vector from the dual coefficients
        self.W = np.multiply(self.A, Y).dot(X)


if __name__ == '__main__':
    # Example 2.2 in 统计学习方法
    X = np.array([[3, 3], [4, 3], [1, 1]])
    Y = np.array([1, 1, -1])
    model = Perceptron()
    model.fit(X, Y)
    print(model.W)
    print(model.B)
    msg = ''
    for i in range(X.shape[1]):
        msg += "{}*X{}".format(model.W[i], i + 1)
        if i != X.shape[1] - 1:
            msg += ' + '
        else:
            msg += ' + ({})'.format(model.B)
    print("Separating hyperplane:", msg + ' = 0')
    print("Perceptron model: f(x) = sign({})".format(msg))