2000字范文,分享全网优秀范文,学习好帮手!
2000字范文 > 【机器学习】监督学习--(回归)多元线性回归

【机器学习】监督学习--(回归)多元线性回归

时间:2019-04-07 13:02:55

相关推荐

【机器学习】监督学习--(回归)多元线性回归

注:数据集放在文章末尾

(1)多元线性回归 —— 梯度下降法

import numpy as np
from numpy import genfromtxt


def compute_error(theta0, theta1, theta2, x_data, y_data):
    """Mean squared error of the plane theta0 + theta1*x0 + theta2*x1 on the data.

    x_data is an (m, 2) array of features, y_data an (m,) array of targets.
    """
    total_error = 0
    for i in range(len(x_data)):
        prediction = theta1 * x_data[i, 0] + theta2 * x_data[i, 1] + theta0
        total_error += (y_data[i] - prediction) ** 2
    return total_error / float(len(x_data))


def gradient_descent_runner(x_data, y_data, theta0, theta1, theta2, lr, epochs):
    """Run batch gradient descent for `epochs` iterations.

    Returns the final (theta0, theta1, theta2) parameters.
    """
    m = float(len(x_data))
    for _ in range(epochs):
        theta0_grad = 0
        theta1_grad = 0
        theta2_grad = 0
        # Accumulate the gradient averaged over all samples.
        for j in range(len(x_data)):
            residual = (theta1 * x_data[j, 0] + theta2 * x_data[j, 1] + theta0) - y_data[j]
            theta0_grad += (1 / m) * residual
            theta1_grad += (1 / m) * x_data[j, 0] * residual
            theta2_grad += (1 / m) * x_data[j, 1] * residual
        # Simultaneous update of all three parameters.
        theta0 = theta0 - (lr * theta0_grad)
        theta1 = theta1 - (lr * theta1_grad)
        theta2 = theta2 - (lr * theta2_grad)
    return theta0, theta1, theta2


if __name__ == "__main__":
    # Plotting imports are only needed when the script is run directly.
    import matplotlib.pyplot as plt
    from mpl_toolkits.mplot3d import Axes3D  # noqa: F401 (registers the '3d' projection)

    # Load the data.
    data = genfromtxt(r"Delivery.csv", delimiter=',')
    # Split into features and target.
    x_data = data[:, :-1]
    y_data = data[:, -1]
    # Learning rate.
    lr = 0.0001
    # Initial parameters.
    theta0 = 0
    theta1 = 0
    theta2 = 0
    # Maximum number of iterations.
    epochs = 1000

    print("Starting theta0 = {0}, theta1 = {1}, theta2 = {2}, error = {3}".format(
        theta0, theta1, theta2, compute_error(theta0, theta1, theta2, x_data, y_data)))
    print("Running...")
    theta0, theta1, theta2 = gradient_descent_runner(
        x_data, y_data, theta0, theta1, theta2, lr, epochs)
    print("After {0} iterations theta0 = {1}, theta1 = {2}, theta2 = {3}, error = {4}".format(
        epochs, theta0, theta1, theta2, compute_error(theta0, theta1, theta2, x_data, y_data)))

输出:

# Visualize the gradient-descent fit: training points plus the learned plane.
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')

# Training samples as large red markers.
ax.scatter(x_data[:, 0], x_data[:, 1], y_data, c='r', marker='o', s=100)

# Build a grid over the observed feature values and evaluate the plane on it.
grid_x0, grid_x1 = np.meshgrid(x_data[:, 0], x_data[:, 1])
plane = theta0 + grid_x0 * theta1 + grid_x1 * theta2
ax.plot_surface(grid_x0, grid_x1, plane)

# Label the axes.
ax.set_xlabel('Miles')
ax.set_ylabel('Num of Deliveries')
ax.set_zlabel('Time')

# Show the figure.
plt.show()

输出:

(2)多元线性回归 —— sklearn

import numpy as np
from numpy import genfromtxt
from sklearn import linear_model
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D

# Load the delivery dataset and split it into features and target.
data = genfromtxt(r"Delivery.csv", delimiter=',')
x_data, y_data = data[:, :-1], data[:, -1]

# Fit an ordinary least-squares linear model.
model = linear_model.LinearRegression().fit(x_data, y_data)

# Learned coefficients and intercept.
print("coefficients:", model.coef_)
print("intercept:", model.intercept_)

# Predict the delivery time for a 102-mile trip with 4 deliveries.
x_test = [[102, 4]]
predict = model.predict(x_test)
print("predict:", predict)

输出:

# Visualize the sklearn fit: training points plus the predicted plane.
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')

# Training samples as large red markers.
ax.scatter(x_data[:, 0], x_data[:, 1], y_data, c='r', marker='o', s=100)

# Evaluate the fitted plane on a grid spanned by the observed features.
grid_x0, grid_x1 = np.meshgrid(x_data[:, 0], x_data[:, 1])
plane = model.intercept_ + grid_x0 * model.coef_[0] + grid_x1 * model.coef_[1]
ax.plot_surface(grid_x0, grid_x1, plane)

# Label the axes.
ax.set_xlabel('Miles')
ax.set_ylabel('Num of Deliveries')
ax.set_zlabel('Time')

# Show the figure.
plt.show()

输出:

● 数据集:“Delivery.csv”:

100,4,9.3
50,3,4.8
100,4,8.9
100,2,6.5
50,2,4.2
80,2,6.2
75,3,7.4
65,4,6
90,3,7.6
90,2,6.1

本内容不代表本网观点和政治立场,如有侵犯你的权益请联系我们处理。
网友评论
网友评论仅供其表达个人看法,并不表明网站立场。