机器学习08DAY

2023-03-29 22:16:37 来源:博客园


(资料图片)

线性回归波士顿房价预测案例

步骤

导入数据 → 数据分割 → 数据标准化 → 正规方程预测 → 梯度下降预测
# --- Imports ---
import numpy as np
import pandas as pd  # data loading

from sklearn.linear_model import LinearRegression, SGDRegressor, Ridge  # normal equation, SGD, ridge
from sklearn.metrics import mean_squared_error  # mean squared error metric
from sklearn.model_selection import train_test_split  # train/test split
from sklearn.preprocessing import StandardScaler  # standardization
# Load the Boston house-price data set from the local CSV.
boston = pd.read_csv("./boston_house_prices.csv")
y = boston["MEDV"]                 # target: median home value (continuous)
x = boston.drop(["MEDV"], axis=1)  # features: every other column
x
CRIMZNINDUSCHASNOXRMAGEDISRADTAXPTRATIOBLSTAT
00.0063218.02.3100.5386.57565.24.0900129615.3396.904.98
10.027310.07.0700.4696.42178.94.9671224217.8396.909.14
20.027290.07.0700.4697.18561.14.9671224217.8392.834.03
30.032370.02.1800.4586.99845.86.0622322218.7394.632.94
40.069050.02.1800.4587.14754.26.0622322218.7396.905.33
..........................................
5010.062630.011.9300.5736.59369.12.4786127321.0391.999.67
5020.045270.011.9300.5736.12076.72.2875127321.0396.909.08
5030.060760.011.9300.5736.97691.02.1675127321.0396.905.64
5040.109590.011.9300.5736.79489.32.3889127321.0393.456.48
5050.047410.011.9300.5736.03080.82.5050127321.0396.907.88

506 rows × 13 columns

# StandardScaler requires a 2-D array, so reshape the target to a column
# vector of shape (n_samples, 1).
y = np.array(y).reshape(-1, 1)

# Split into training and test sets (25% held out).
# NOTE(review): no random_state is set, so the split — and every result
# below — differs between runs; pass random_state=... for reproducibility.
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.25)

# Standardize the features: fit the scaler on the training set only, then
# apply the same transform to the test set (avoids test-set leakage).
std_x = StandardScaler().fit(x_train)
x_train = std_x.transform(x_train)
x_test = std_x.transform(x_test)

# Standardize the target as well, so the fitted coefficients stay small;
# predictions are mapped back with std_y.inverse_transform later.
std_y = StandardScaler().fit(y_train)
y_train = std_y.transform(y_train)
y_test = std_y.transform(y_test)
# Fit ordinary least squares (normal-equation solution) on the training set.
lr = LinearRegression()
lr.fit(x_train, y_train)
LinearRegression()
In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook. On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
LinearRegression()
# Inspect the fitted linear-regression coefficients (one weight per feature).
lr.coef_
array([[-0.11432612,  0.12922939,  0.05168773,  0.0306429 , -0.27800333,         0.26465189,  0.02894241, -0.34962992,  0.31569604, -0.24717234,        -0.26784233,  0.11032066, -0.41354896]])
# Predict the test targets, then undo the target standardization
# (inverse_transform) so the values are back in the original price units.
y_lr_predict = std_y.inverse_transform(lr.predict(x_test))
y_lr_predict
array([[16.88302519],       [25.67464426],       [24.11685261],       [23.56287231],       [33.21442377],       [17.44428398],       [25.08538719],       [14.36188824],       [23.8507796 ],       [33.90875038],       [30.19255243],       [13.30811675],       [28.60383216],       [34.6094617 ],       [27.32666762],       [24.88310221],       [21.97377504],       [14.36080511],       [15.19834144],       [18.91688837],       [14.39284881],       [37.4279415 ],       [28.85628069],       [23.47343089],       [30.65979144],       [20.77177982],       [21.29899429],       [13.81410752],       [24.36591359],       [26.91067836],       [19.39456288],       [32.1620506 ],       [19.55908532],       [24.32677646],       [31.64841534],       [30.24445789],       [32.6601561 ],       [25.45770231],       [24.36812628],       [24.89892187],       [39.51204317],       [18.25845589],       [30.78050699],       [32.2023306 ],       [43.40712056],       [25.5830554 ],       [24.18175285],       [22.22948918],       [16.30284868],       [27.20443307],       [ 4.3558633 ],       [18.24971547],       [17.84402513],       [14.26170574],       [13.64455453],       [34.67825232],       [ 8.26805278],       [23.65092602],       [ 6.3965518 ],       [21.25451713],       [15.71560149],       [29.29210802],       [29.4266973 ],       [19.91658528],       [14.95841515],       [20.88449625],       [28.59263417],       [23.78937845],       [23.4489951 ],       [11.0440392 ],       [19.4491492 ],       [15.48416226],       [18.68260651],       [24.20199734],       [15.78191346],       [14.11243619],       [22.94901405],       [24.02549373],       [21.11185284],       [28.57665473],       [ 7.45548609],       [22.77052456],       [ 3.44149312],       [15.93067248],       [25.72200382],       [22.56825235],       [32.70873719],       [17.86289514],       [24.49691931],       [35.25395986],       [26.98360999],       [17.51000169],       [28.08531514],       [21.15268973],       [24.73138251],     
  [-4.82364972],       [21.34031184],       [21.89560028],       [16.35765837],       [35.32764197],       [40.95997005],       [23.59853443],       [19.92593809],       [34.43871021],       [21.37340243],       [20.48191389],       [23.77537201],       [28.67150943],       [40.73850694],       [29.38542779],       [21.25032737],       [22.15530128],       [31.1447006 ],       [17.18008197],       [38.09276107],       [18.17714902],       [26.01850231],       [13.73181577],       [12.47399654],       [27.01659936],       [18.62962667],       [11.26915964],       [19.48824649],       [23.64510406],       [18.88328087],       [19.49037977],       [13.58238162]])
# Mean squared error of the linear-regression predictions, computed in the
# original (de-standardized) units so it is comparable across models.
loss_lr = mean_squared_error(std_y.inverse_transform(y_test), y_lr_predict)
loss_lr
27.89401984711536
# Fit linear regression by stochastic gradient descent.
# SGDRegressor expects a 1-D target array; ravel() flattens the (n, 1)
# column vector and silences the DataConversionWarning raised otherwise.
sgd = SGDRegressor()
sgd.fit(x_train, y_train.ravel())
D:\DeveloperTools\Anaconda\lib\site-packages\sklearn\utils\validation.py:1143: DataConversionWarning: A column-vector y was passed when a 1d array was expected. Please change the shape of y to (n_samples, ), for example using ravel().  y = column_or_1d(y, warn=True)
SGDRegressor()
In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook. On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
SGDRegressor()
# Inspect the SGD-fitted regression coefficients (one weight per feature).
sgd.coef_
array([-0.09761234,  0.08895746, -0.02421963,  0.02879482, -0.17976106,        0.30861884, -0.00250273, -0.27224473,  0.12435245, -0.0780263 ,       -0.24480836,  0.12012805, -0.38888841])
# SGDRegressor.predict returns a 1-D array; reshape it to a column vector
# so std_y.inverse_transform can map it back to the original price units.
y_sgd_predict = std_y.inverse_transform(sgd.predict(x_test).reshape(-1, 1))
y_sgd_predict
array([[15.21420286],       [24.63693863],       [24.39828101],       [24.13982716],       [32.78620978],       [17.93179618],       [26.15279053],       [14.48966421],       [23.47566531],       [33.17239509],       [31.84452891],       [12.45562282],       [27.95300787],       [33.80241039],       [28.49956651],       [24.66480492],       [22.36941513],       [12.77314567],       [16.19679874],       [19.55497851],       [16.56475828],       [37.33119072],       [28.7775393 ],       [20.96986273],       [30.61621249],       [21.02209026],       [21.7295418 ],       [12.81210827],       [24.5110437 ],       [26.43938704],       [18.35264658],       [32.65009183],       [18.43526582],       [23.00618081],       [31.7400822 ],       [29.04743561],       [33.05208407],       [25.74448792],       [24.50083552],       [25.60223044],       [39.54513459],       [17.1185942 ],       [31.03740088],       [31.08938082],       [43.05539907],       [25.73953331],       [24.94663261],       [22.54125585],       [18.28413619],       [26.10355346],       [ 6.00742562],       [17.91294014],       [18.30811745],       [12.44053594],       [12.80928627],       [35.3744289 ],       [ 9.09787342],       [22.93659674],       [ 5.43064498],       [21.74836536],       [14.35146387],       [29.01003788],       [29.08635743],       [22.73088123],       [14.63525207],       [21.85792442],       [27.65781677],       [23.792957  ],       [24.6814747 ],       [10.92976509],       [19.83990001],       [15.96966791],       [18.14900105],       [25.20832651],       [13.27422495],       [14.30232772],       [23.11242467],       [25.77201334],       [19.68444307],       [28.57611678],       [ 7.63364889],       [20.4696819 ],       [ 2.27690801],       [16.55235057],       [25.58622675],       [22.77961526],       [32.47346299],       [17.77241159],       [22.97811939],       [36.08937688],       [26.73491284],       [18.29474336],       [29.46454709],       [21.71750293],       [26.04970043],     
  [-5.49919448],       [22.22155065],       [22.98441588],       [15.12536374],       [35.73982924],       [40.87874356],       [23.690842  ],       [20.5993433 ],       [35.69123855],       [20.68804356],       [20.94190843],       [26.02227126],       [31.17410177],       [40.95630421],       [29.90544672],       [23.50763821],       [22.27432439],       [29.64014839],       [16.78407484],       [38.12893576],       [17.69781499],       [25.22891716],       [14.21875615],       [12.55974345],       [26.99891265],       [17.65595579],       [ 8.4159419 ],       [19.90142312],       [22.80759632],       [19.16843753],       [19.42995139],       [14.04081021]])
# Mean squared error of the SGD predictions, in the original units.
loss_sgd = mean_squared_error(std_y.inverse_transform(y_test), y_sgd_predict)
loss_sgd
28.05592202385498
# Ridge regression: linear regression with L2 regularization.
# alpha sets the regularization strength (1.0 is scikit-learn's default).
rd = Ridge(alpha=1.0)
rd.fit(x_train, y_train)
Ridge()
In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook. On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
Ridge()
# Inspect the ridge-regression coefficients (one weight per feature).
rd.coef_
array([[-0.11307323,  0.12670886,  0.0472335 ,  0.03097279, -0.27277927,         0.26649452,  0.02738887, -0.34543899,  0.30352311, -0.23553989,        -0.26624461,  0.11041044, -0.4112231 ]])
# Predict the test targets with ridge regression, then de-standardize the
# predictions back to the original price units.
y_rd_predict = std_y.inverse_transform(rd.predict(x_test))
y_rd_predict
array([[16.81586993],       [25.62225283],       [24.13239652],       [23.60178301],       [33.17482664],       [17.47603707],       [25.12448624],       [14.3927178 ],       [23.82242142],       [33.83569284],       [30.25910195],       [13.28992719],       [28.54601232],       [34.54914571],       [27.36491618],       [24.87707782],       [22.00096365],       [14.31750595],       [15.26655896],       [18.95164011],       [14.52104908],       [37.38819398],       [28.82792081],       [23.3211182 ],       [30.6343198 ],       [20.80233876],       [21.31839148],       [13.79005679],       [24.3590396 ],       [26.87702832],       [19.35529157],       [32.16020072],       [19.52355909],       [24.26581358],       [31.63175652],       [30.17323569],       [32.66670796],       [25.47912641],       [24.36217689],       [24.91701584],       [39.47302165],       [18.22458912],       [30.75058024],       [32.14915944],       [43.35075081],       [25.58142763],       [24.22487493],       [22.23864659],       [16.45656221],       [27.14231857],       [ 4.52270441],       [18.23427535],       [17.87417222],       [14.1986027 ],       [13.62643288],       [34.69768313],       [ 8.34275415],       [23.6132958 ],       [ 6.38923846],       [21.27558839],       [15.66185343],       [29.25676316],       [29.39607496],       [20.06328838],       [14.96702673],       [20.93444425],       [28.53639958],       [23.76724172],       [23.49637722],       [11.0745397 ],       [19.48381901],       [15.51875938],       [18.65960692],       [24.24100427],       [15.64918598],       [14.14894164],       [22.94337728],       [24.09499988],       [21.05268108],       [28.55429725],       [ 7.51316118],       [22.62833775],       [ 3.43124359],       [15.98036192],       [25.70480807],       [22.57033657],       [32.66624286],       [17.87124766],       [24.43818932],       [35.27111772],       [26.94613641],       [17.56269425],       [28.14078364],       [21.18918514],       [24.78403264],     
  [-4.78164143],       [21.36553975],       [21.94334785],       [16.31804996],       [35.31337498],       [40.90768652],       [23.60641046],       [19.94431495],       [34.4813584 ],       [21.35327276],       [20.51324011],       [23.90175952],       [28.77241981],       [40.73752328],       [29.39270623],       [21.38182702],       [22.15806225],       [31.07297608],       [17.17452852],       [38.05954909],       [18.16913598],       [25.97549364],       [13.78567603],       [12.51045123],       [26.99932827],       [18.59193795],       [11.15468796],       [19.52228306],       [23.60713735],       [18.8861402 ],       [19.4947593 ],       [13.61341828]])
# Mean squared error of the ridge predictions, in the original units.
loss_rd = mean_squared_error(std_y.inverse_transform(y_test), y_rd_predict)
loss_rd
27.836735080339313
上一篇:

沈五条升级至六条,网友最关注其中2点,像极了七年前的沈阳楼市|天天速讯

下一篇:

沈五条升级至六条,网友最关注其中2点,像极了七年前的沈阳楼市|天天速讯

推荐阅读