python实现支持向量回归,包括线性,多项式,径向基
2017-06-02 23:31
821 查看
"""Support vector regression on the Boston housing data.

Fits SVR models with linear, polynomial and RBF kernels and reports
R^2, mean squared error and mean absolute error for each.
"""
from sklearn.datasets import load_boston
# sklearn.cross_validation was removed in scikit-learn 0.20; the
# maintained module with the same train_test_split API is model_selection.
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.svm import SVR
from sklearn.metrics import mean_squared_error, mean_absolute_error
import numpy as np

# NOTE(review): load_boston was removed in scikit-learn 1.2; on recent
# versions substitute fetch_california_housing (same usage pattern).
boston = load_boston()
X = boston.data
y = boston.target
X_train, X_test, y_train, y_test = train_test_split(
    X, y, random_state=33, test_size=0.25)

print('The max target value is:', np.max(boston.target))
print('The min target value is:', np.min(boston.target))
print('The average target value is:', np.mean(boston.target))

ss_X = StandardScaler()
ss_y = StandardScaler()
X_train = ss_X.fit_transform(X_train)
X_test = ss_X.transform(X_test)
# StandardScaler requires 2-D input: reshape the 1-D targets to a column,
# then flatten back so SVR.fit/predict receive 1-D arrays as expected.
y_train = ss_y.fit_transform(y_train.reshape(-1, 1)).ravel()
y_test = ss_y.transform(y_test.reshape(-1, 1)).ravel()


def _report(kernel_name, model, y_predict):
    """Print R^2, MSE and MAE (in original units) for one fitted SVR."""
    y_true = ss_y.inverse_transform(y_test.reshape(-1, 1))
    y_pred = ss_y.inverse_transform(y_predict.reshape(-1, 1))
    print('R-squared value of %s SVR is:' % kernel_name,
          model.score(X_test, y_test))
    print('The mean squared error of %s SVR is:' % kernel_name,
          mean_squared_error(y_true, y_pred))
    print('The mean absolute error of %s SVR is:' % kernel_name,
          mean_absolute_error(y_true, y_pred))


# One pass per kernel removes the triplicated fit/predict/print code.
for kernel in ('linear', 'poly', 'rbf'):
    svr = SVR(kernel=kernel)
    svr.fit(X_train, y_train)
    _report(kernel, svr, svr.predict(X_test))
结果显示,径向基核函数的效果最好
"""Compare linear, polynomial and RBF SVR kernels on Boston housing prices.

Standalone script: loads the data, standardizes features and targets,
fits one SVR per kernel, and prints R^2 / MSE / MAE for each model.
"""
# This copy of the script lacked the load_boston import and used the
# removed sklearn.cross_validation module; both are fixed here.
from sklearn.datasets import load_boston
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.svm import SVR
from sklearn.metrics import mean_squared_error, mean_absolute_error
import numpy as np


def _prepare_data():
    """Load, split and standardize the dataset.

    Returns (X_train, X_test, y_train, y_test, ss_y) with the target
    scaler kept so metrics can be reported in original units.
    """
    # NOTE(review): load_boston is gone as of scikit-learn 1.2; swap in
    # fetch_california_housing on recent versions.
    boston = load_boston()
    print('The max target value is:', np.max(boston.target))
    print('The min target value is:', np.min(boston.target))
    print('The average target value is:', np.mean(boston.target))

    X_train, X_test, y_train, y_test = train_test_split(
        boston.data, boston.target, random_state=33, test_size=0.25)

    ss_X = StandardScaler()
    ss_y = StandardScaler()
    X_train = ss_X.fit_transform(X_train)
    X_test = ss_X.transform(X_test)
    # Targets must be 2-D for StandardScaler; flatten back afterwards.
    y_train = ss_y.fit_transform(y_train.reshape(-1, 1)).ravel()
    y_test = ss_y.transform(y_test.reshape(-1, 1)).ravel()
    return X_train, X_test, y_train, y_test, ss_y


def _evaluate(kernel, X_train, X_test, y_train, y_test, ss_y):
    """Fit an SVR with the given kernel and print its three metrics."""
    model = SVR(kernel=kernel)
    model.fit(X_train, y_train)
    predicted = model.predict(X_test)
    actual_true = ss_y.inverse_transform(y_test.reshape(-1, 1))
    actual_pred = ss_y.inverse_transform(predicted.reshape(-1, 1))
    print('R-squared value of %s SVR is:' % kernel,
          model.score(X_test, y_test))
    print('The mean squared error of %s SVR is:' % kernel,
          mean_squared_error(actual_true, actual_pred))
    print('The mean absolute error of %s SVR is:' % kernel,
          mean_absolute_error(actual_true, actual_pred))


def main():
    """Run the three-kernel SVR comparison."""
    X_train, X_test, y_train, y_test, ss_y = _prepare_data()
    for kernel in ('linear', 'poly', 'rbf'):
        _evaluate(kernel, X_train, X_test, y_train, y_test, ss_y)


if __name__ == '__main__':
    main()
结果显示,径向基核函数的效果最好
相关文章推荐
- 线性回归总结及python实现
- 【Stanford|斯坦福-机器学习:线性回归-单特征梯度下降+动态图】python3实现
- 机器学习之线性回归python实现
- python实现简单线性回归
- Python实现机器学习--实现多元线性回归
- 梯度下降法 线性回归 多项式回归 python实现
- 线性回归python实现
- 简单线性回归的Python实现
- 机器学习入门学习笔记:(2.2)线性回归python程序实现
- 用Python实现机器学习算法:线性回归
- 线性支持向量分类机及其实现
- [置顶] 【算法 机器学习】MATLAB、R、python三种编程语言实现简单线性回归算法比较
- 机器学习实战笔记(Python实现)-08-线性回归
- 机器学习:线性回归与Python代码实现
- 机器学习算法的Python实现 (1):logistics回归 与 线性判别分析(LDA)
- 线性回归的python实现
- 回归分析---线性回归原理和Python实现
- python实现集成回归算法,包括随机森林,极端随机森林,梯度boosting算法
- Linear Regression 线性回归sklearn python实现
- 机器学习理论篇之线性回归(python实现)