6.5 Implementation of the perceptron learning algorithm
6.5.1 Implementation in the original form
Let's first implement Example 1 from Section 6.4.1.
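As a brief recap (this simply restates the standard original-form perceptron, which is what the code below implements): a sample $(x_i, y_i)$ with $y_i \in \{+1, -1\}$ is misclassified when

$$y_i (w \cdot x_i + b) \le 0,$$

and every time a misclassified sample is found, the parameters are updated with learning rate $\eta$:

$$w \leftarrow w + \eta\, y_i x_i, \qquad b \leftarrow b + \eta\, y_i.$$

Training stops once a full pass over the data finds no misclassified point. The Python code is as follows.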
# -*- coding: utf-8 -*-
import numpy as np
from matplotlib import pyplot as plt
from matplotlib.lines import Line2D
import matplotlib
matplotlib.rcParams['font.sans-serif'] = ['SimHei']
matplotlib.rcParams['font.family'] = 'sans-serif'
matplotlib.rcParams['axes.unicode_minus'] = False
def loadData(filename):
    """
    Parameters:
        filename - name of the data file
    Returns:
        dataMat - data set
        labelMat - label set
    """
    data = np.loadtxt(filename)
    dataMat = data[:, 0:2]
    labelMat = data[:, 2]
    return dataMat, labelMat
def loadDataSet(filename):
    """
    Parameters:
        filename - name of the data file
    Returns:
        xArr - data set
        yArr - label set
    """
    X = []
    Y = []
    with open(filename, 'rb') as f:
        for idx, line in enumerate(f):
            line = line.decode('utf-8').strip()
            if not line:
                continue
            eles = line.split()
            eles = list(map(float, eles))
            if idx == 0:
                numFea = len(eles)  # number of columns in the file
            # All but the last value in each row go into X
            X.append(eles[:-1])
            # The last value in each row is the label
            Y.append([eles[-1]])
    # Convert the X, Y lists into NumPy arrays
    xArr = np.array(X)
    yArr = np.array(Y)
    return xArr, yArr
def trainPerceptron(dataMat, labelMat, eta):
    """
    Train the perceptron in its original form.
    Parameters:
        dataMat - data set
        labelMat - label set
        eta - learning rate
    Returns:
        weight, bias
    """
    m, n = dataMat.shape
    weight = np.zeros(n)
    bias = 0
    flag = True
    while flag:
        for i in range(m):
            # Misclassified point: y_i * (w . x_i + b) <= 0
            if np.any(labelMat[i] * (np.dot(weight, dataMat[i]) + bias) <= 0):
                weight = weight + eta * labelMat[i] * dataMat[i].T
                bias = bias + eta * labelMat[i]
                # print("weight, bias: ", end="")
                # print(weight, end=" ")
                # print(bias)
                flag = True
                break
        else:
            # A full pass found no misclassified point: training is done
            flag = False
    return weight, bias
def plotResult(dataMat, labelMat, weight, bias):
    """
    Parameters:
        dataMat - data set
        labelMat - label set
        weight - weight vector
        bias - bias
    Returns:
        None
    """
    fig = plt.figure()
    axes = fig.add_subplot(111)
    type1_x = []
    type1_y = []
    type2_x = []
    type2_y = []
    for i in range(len(labelMat)):
        if labelMat[i] == -1:
            type1_x.append(dataMat[i][0])
            type1_y.append(dataMat[i][1])
        if labelMat[i] == 1:
            type2_x.append(dataMat[i][0])
            type2_y.append(dataMat[i][1])
    # Method 1
    # axes.scatter(type1_x, type1_y, marker='x', s=20, c='red')
    # axes.scatter(type2_x, type2_y, marker='o', s=20, c='blue')
    # Method 2
    plt.plot(type1_x, type1_y, 'bo', type2_x, type2_y, 'rx')
    # Separating line x2 = -(weight[0] * x1 + bias) / weight[1], evaluated at x1 = 0.1 and x1 = 4.0
    y = (0.1 * -weight[0] / weight[1] + -bias / weight[1],
         4.0 * -weight[0] / weight[1] + -bias / weight[1])
    axes.add_line(Line2D((0.1, 4.0), y, linewidth=3, color='yellow'))
    plt.axis([0, 5, 0, 5])
    plt.grid(True)
    plt.title('Perceptron Algorithm')
    plt.xlabel('X1')
    plt.ylabel('X2')
    plt.show()
if __name__ == "__main__":
    ## Step 1: load data...
    print("Step 1: load data...")
    dataMat, labelMat = loadDataSet('testSet.txt')
    ## Step 2: training...
    print("Step 2: training...")
    weight, bias = trainPerceptron(dataMat, labelMat, 1)
    ## Step 3: show the result...
    print("Step 3: show the result...")
    print("weight, bias:", weight, bias)
    ## Step 4: show the picture...
    print("Step 4: show the picture...")
    plotResult(dataMat, labelMat, weight, bias)
The results are as follows:
As you can see, the result produced by our implementation matches the manual calculation.
[The complete code is in perceptron_Classify\perceptron_Classify_v1\Perceptron_Classify_v1.0.py]
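If you do not have a testSet.txt file at hand, trainPerceptron can also be sanity-checked on a tiny hand-built dataset. The sketch below assumes three linearly separable points chosen purely for illustration (they are not the contents of testSet.txt) and reuses the function exactly as defined above.

import numpy as np

# Hypothetical data: two points labelled +1 and one labelled -1,
# in the same shapes that loadDataSet() returns.
dataMat = np.array([[3.0, 3.0],
                    [4.0, 3.0],
                    [1.0, 1.0]])
labelMat = np.array([[1.0], [1.0], [-1.0]])

weight, bias = trainPerceptron(dataMat, labelMat, 1)
print("weight, bias:", weight, bias)

For this ordering of the points and eta = 1, the loop stops at weight = [1. 1.] and bias = [-3.], which is easy to verify by hand.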
6.5.2 Implementation in the dual form
Let's now implement Example 2 from Section 6.4.2.
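As a brief recap (this restates the standard dual form, which is what the code below implements), each training point $x_i$ carries a coefficient $\alpha_i$ that counts how many times it has triggered an update. A point $x_i$ is misclassified when

$$y_i \Big( \sum_{j=1}^{m} \alpha_j y_j (x_j \cdot x_i) + b \Big) \le 0,$$

in which case $\alpha_i \leftarrow \alpha_i + \eta$ and $b \leftarrow b + \eta\, y_i$. Once training stops, the primal weights are recovered as $w = \sum_{j} \alpha_j y_j x_j$, which is exactly the final np.dot in the code. The Python code is as follows.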
# -*- coding: utf-8 -*-
import numpy as np
from matplotlib import pyplot as plt
from matplotlib.lines import Line2D
import matplotlib
matplotlib.rcParams['font.sans-serif'] = ['SimHei']
matplotlib.rcParams['font.family'] = 'sans-serif'
matplotlib.rcParams['axes.unicode_minus'] = False
def loadData(filename):
    """
    Parameters:
        filename - name of the data file
    Returns:
        dataMat - data set
        labelMat - label set
    """
    data = np.loadtxt(filename)
    dataMat = data[:, 0:2]
    labelMat = data[:, 2]
    return dataMat, labelMat
def loadDataSet(filename):
    """
    Parameters:
        filename - name of the data file
    Returns:
        xArr - data set
        yArr - label set
    """
    X = []
    Y = []
    with open(filename, 'rb') as f:
        for idx, line in enumerate(f):
            line = line.decode('utf-8').strip()
            if not line:
                continue
            eles = line.split()
            eles = list(map(float, eles))
            if idx == 0:
                numFea = len(eles)  # number of columns in the file
            # All but the last value in each row go into X
            X.append(eles[:-1])
            # The last value in each row is the label
            Y.append([eles[-1]])
    # Convert the X, Y lists into NumPy arrays
    xArr = np.array(X)
    yArr = np.array(Y)
    return xArr, yArr
def trainPerceptron(dataMat, labelMat, alpha, b, eta):
    """
    Train the perceptron in its dual form.
    Parameters:
        dataMat - data set
        labelMat - label set
        alpha - dual coefficients, one per sample
        b - bias
        eta - learning rate
    Returns:
        weight, bias
    """
    m = len(dataMat)
    flag = True
    while flag:
        for i in range(m):
            # Misclassified point: y_i * (sum_j alpha_j * y_j * (x_j . x_i) + b) <= 0
            if (labelMat[i, 0] * (np.sum(alpha * labelMat * np.dot(dataMat, dataMat[i].T).reshape((m, 1))) + b)) <= 0:
                alpha[i] = alpha[i] + eta
                b = b + eta * labelMat[i]
                flag = True
                break
        else:
            flag = False
    # Recover the primal weight vector from the dual coefficients
    w = np.dot(dataMat.T, alpha * labelMat)
    return w, b
def plotResult(dataMat, labelMat, weight, bias):
    """
    Parameters:
        dataMat - data set
        labelMat - label set
        weight - weight vector
        bias - bias
    Returns:
        None
    """
    fig = plt.figure()
    axes = fig.add_subplot(111)
    type1_x = []
    type1_y = []
    type2_x = []
    type2_y = []
    for i in range(len(labelMat)):
        if labelMat[i] == -1:
            type1_x.append(dataMat[i][0])
            type1_y.append(dataMat[i][1])
        if labelMat[i] == 1:
            type2_x.append(dataMat[i][0])
            type2_y.append(dataMat[i][1])
    # Method 1
    # axes.scatter(type1_x, type1_y, marker='x', s=20, c='red')
    # axes.scatter(type2_x, type2_y, marker='o', s=20, c='blue')
    # Method 2
    plt.plot(type1_x, type1_y, 'bo', type2_x, type2_y, 'rx')
    # Separating line x2 = -(weight[0] * x1 + bias) / weight[1], evaluated at x1 = 0.1 and x1 = 4.0
    y = (0.1 * -weight[0] / weight[1] + -bias / weight[1],
         4.0 * -weight[0] / weight[1] + -bias / weight[1])
    axes.add_line(Line2D((0.1, 4.0), y, linewidth=3, color='yellow'))
    plt.axis([0, 5, 0, 5])
    plt.grid(True)
    plt.title('Perceptron Algorithm')
    plt.xlabel('X1')
    plt.ylabel('X2')
    plt.show()
if __name__ == "__main__":
    ## Step 1: load data...
    print("Step 1: load data...")
    dataMat, labelMat = loadDataSet('testSet.txt')
    ## Step 2: training...
    print("Step 2: training...")
    alpha = np.zeros((len(dataMat), 1))
    b = 0
    eta = 1
    weight, bias = trainPerceptron(dataMat, labelMat, alpha, b, eta)
    ## Step 3: show the result...
    print("Step 3: show the result...")
    print("weight, bias:", weight, bias)
    ## Step 4: show the picture...
    print("Step 4: show the picture...")
    plotResult(dataMat, labelMat, weight, bias)
The results are as follows:
As you can see, the result produced by our implementation matches the manual calculation.
[The complete code is in perceptron_Classify\perceptron_Classify_v1\Perceptron_Classify_dual_v1.1.py]
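One detail worth pointing out in the dual form is that np.dot(dataMat, dataMat[i].T) is recomputed on every pass, even though the training points never change. A common refinement is to precompute the Gram matrix of all pairwise inner products once and then index into it. The function below is only an illustrative sketch of that variant (it is not part of the script above) and keeps the same interface as trainPerceptron.

import numpy as np

def trainPerceptronGram(dataMat, labelMat, alpha, b, eta):
    """Dual-form training with a precomputed Gram matrix (illustrative variant)."""
    m = len(dataMat)
    gram = np.dot(dataMat, dataMat.T)  # gram[j, i] = x_j . x_i, computed once
    flag = True
    while flag:
        for i in range(m):
            # Same misclassification test, but reading inner products from gram
            if labelMat[i, 0] * (np.sum(alpha * labelMat * gram[:, [i]]) + b) <= 0:
                alpha[i] = alpha[i] + eta
                b = b + eta * labelMat[i]
                break
        else:
            flag = False
    w = np.dot(dataMat.T, alpha * labelMat)
    return w, b

The update rule and stopping logic are unchanged; only the inner products are cached, which starts to matter once the dataset is no longer tiny.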
6.6 Implementation of the perceptron learning algorithm with sklearn
The code is as follows:
# -*- coding: utf-8 -*-
import numpy as np
from matplotlib import pyplot as plt
from matplotlib.lines import Line2D
import matplotlib
from sklearn.linear_model import Perceptron
matplotlib.rcParams['font.sans-serif'] = ['SimHei']
matplotlib.rcParams['font.family'] = 'sans-serif'
matplotlib.rcParams['axes.unicode_minus'] = False
def loadData(filename):
    """
    Parameters:
        filename - name of the data file
    Returns:
        dataMat - data set
        labelMat - label set
    """
    data = np.loadtxt(filename)
    dataMat = data[:, 0:2]
    labelMat = data[:, 2]
    return dataMat, labelMat
def loadDataSet(filename):
    """
    Parameters:
        filename - name of the data file
    Returns:
        xArr - data set
        yArr - label set
    """
    X = []
    Y = []
    with open(filename, 'rb') as f:
        for idx, line in enumerate(f):
            line = line.decode('utf-8').strip()
            if not line:
                continue
            eles = line.split()
            eles = list(map(float, eles))
            if idx == 0:
                numFea = len(eles)  # number of columns in the file
            # All but the last value in each row go into X
            X.append(eles[:-1])
            # The last value in each row is the label
            Y.append([eles[-1]])
    # Convert the X, Y lists into NumPy arrays
    xArr = np.array(X)
    yArr = np.array(Y)
    return xArr, yArr
def plotResult(dataMat, labelMat, weight, bias):
    """
    Parameters:
        dataMat - data set
        labelMat - label set
        weight - weight vector
        bias - bias
    Returns:
        None
    """
    fig = plt.figure()
    axes = fig.add_subplot(111)
    type1_x = []
    type1_y = []
    type2_x = []
    type2_y = []
    for i in range(len(labelMat)):
        if labelMat[i] == -1:
            type1_x.append(dataMat[i][0])
            type1_y.append(dataMat[i][1])
        if labelMat[i] == 1:
            type2_x.append(dataMat[i][0])
            type2_y.append(dataMat[i][1])
    # Method 1
    # axes.scatter(type1_x, type1_y, marker='x', s=20, c='red')
    # axes.scatter(type2_x, type2_y, marker='o', s=20, c='blue')
    # Method 2
    plt.plot(type1_x, type1_y, 'bo', type2_x, type2_y, 'rx')
    # Separating line x2 = -(weight[0] * x1 + bias) / weight[1], evaluated at x1 = 0.1 and x1 = 4.0
    y = (0.1 * -weight[0] / weight[1] + -bias / weight[1],
         4.0 * -weight[0] / weight[1] + -bias / weight[1])
    axes.add_line(Line2D((0.1, 4.0), y, linewidth=3, color='yellow'))
    plt.axis([0, 5, 0, 5])
    plt.grid(True)
    plt.title('Perceptron Algorithm')
    plt.xlabel('X1')
    plt.ylabel('X2')
    plt.show()
if __name__ == "__main__":
    ## Step 1: load data...
    print("Step 1: load data...")
    dataMat, labelMat = loadData('testSet.txt')
    ## Step 2: init PLA...
    print("Step 2: init PLA...")
    # Note: n_iter exists only in older scikit-learn releases; newer releases use max_iter instead
    clf = Perceptron(fit_intercept=True, shuffle=False, random_state=0, n_iter=30)
    ## Step 3: training...
    print("Step 3: training...")
    clf.fit(dataMat, labelMat)
    ## Step 4: show the result...
    print("Step 4: show the result...")
    # Get the weight matrix
    weights = np.array(clf.coef_).T
    print('weights:', weights)
    # Get the intercept (bias)
    bias = np.array(clf.intercept_)
    print('bias:', bias)
    ## Step 5: show the picture...
    print("Step 5: show the picture...")
    plotResult(dataMat, labelMat, weights, bias)
The complete code is in perceptron_Classify\perceptron_Classify-Sklearn_v2\perceptron_Classify-Sklearn_v2.0.py, and its results are as follows:
As you can see, the result is the same as the previous calculations.
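A note on versions: the n_iter parameter used above matches the scikit-learn 0.19 API referenced below, but it was removed in later releases in favour of max_iter and tol. A minimal sketch of the same configuration against the newer API (the parameter values simply mirror the ones used above) would be:

from sklearn.linear_model import Perceptron

# max_iter bounds the number of epochs, tol=None disables early stopping,
# and eta0 is the learning rate (1.0 is also the default).
clf = Perceptron(fit_intercept=True, shuffle=False, random_state=0,
                 max_iter=30, tol=None, eta0=1.0)
clf.fit(dataMat, labelMat)
print("weights:", clf.coef_, "bias:", clf.intercept_)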
Reference documentation: English documentation: scikit-learn.org/stable/modu… Chinese documentation: sklearn.apachecn.org/cn/0.19.0/m…
English API documentation: scikit-learn.org/stable/modu… Chinese API documentation: sklearn.apachecn.org/cn/0.19.0/m…
[1] Li Hang. Statistical Learning Methods
The reference code for this chapter is available via the accompanying link.