This article walks through the support vector regression (SVR) model for nonlinear regression in Python with scikit-learn. Without further ado, the code is as follows:
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets, svm
from sklearn.model_selection import train_test_split

def load_data_regression():
    ''' Load a dataset for the regression problem '''
    diabetes = datasets.load_diabetes()  # the diabetes dataset bundled with scikit-learn
    # Split into training and test sets; the test set is 1/4 of the original data
    return train_test_split(diabetes.data, diabetes.target, test_size=0.25, random_state=0)

# SVR with a linear kernel
def test_SVR_linear(*data):
    X_train, X_test, y_train, y_test = data
    regr = svm.SVR(kernel='linear')
    regr.fit(X_train, y_train)
    print('Coefficients:%s, intercept %s' % (regr.coef_, regr.intercept_))
    print('Score: %.2f' % regr.score(X_test, y_test))

# Build the regression dataset
X_train, X_test, y_train, y_test = load_data_regression()
# Call test_SVR_linear
test_SVR_linear(X_train, X_test, y_train, y_test)
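The linear-kernel run above keeps SVR's defaults (C=1.0, epsilon=0.1), so the reported R² depends on that regularization choice. As a minimal side sketch (an addition to the original article; the C values below are purely illustrative), the same split can be reused to see how the regularization strength C moves the test score:

# Editor's sketch, not part of the original article: sweep the regularization
# strength C for the linear-kernel SVR on the same train/test split.
for C in [0.01, 0.1, 1, 10, 100]:
    regr = svm.SVR(kernel='linear', C=C)
    regr.fit(X_train, y_train)
    print('C=%s, test score: %.2f' % (C, regr.score(X_test, y_test)))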
def test_SVR_poly(*data):
    ''' Test how degree, gamma and coef0 affect the performance of SVR with a polynomial kernel '''
    X_train, X_test, y_train, y_test = data
    fig = plt.figure()
    ### Test degree ###
    degrees = range(1, 20)
    train_scores = []
    test_scores = []
    for degree in degrees:
        regr = svm.SVR(kernel='poly', degree=degree, coef0=1)
        regr.fit(X_train, y_train)
        train_scores.append(regr.score(X_train, y_train))
        test_scores.append(regr.score(X_test, y_test))
    ax = fig.add_subplot(1, 3, 1)
    ax.plot(degrees, train_scores, label="Training score", marker='+')
    ax.plot(degrees, test_scores, label="Testing score", marker='o')
    ax.set_title("SVR_poly_degree r=1")
    ax.set_xlabel("p")
    ax.set_ylabel("score")
    ax.set_ylim(-1, 1.)
    ax.legend(loc="best", framealpha=0.5)
    ### Test gamma, with degree fixed at 3 and coef0 fixed at 1 ###
    gammas = range(1, 40)
    train_scores = []
    test_scores = []
    for gamma in gammas:
        regr = svm.SVR(kernel='poly', gamma=gamma, degree=3, coef0=1)
        regr.fit(X_train, y_train)
        train_scores.append(regr.score(X_train, y_train))
        test_scores.append(regr.score(X_test, y_test))
    ax = fig.add_subplot(1, 3, 2)
    ax.plot(gammas, train_scores, label="Training score", marker='+')
    ax.plot(gammas, test_scores, label="Testing score", marker='o')
    ax.set_title("SVR_poly_gamma r=1")
    ax.set_xlabel(r"$\gamma$")
    ax.set_ylabel("score")
    ax.set_ylim(-1, 1)
    ax.legend(loc="best", framealpha=0.5)
    ### Test r (coef0), with gamma fixed at 20 and degree fixed at 3 ###
    rs = range(0, 20)
    train_scores = []
    test_scores = []
    for r in rs:
        regr = svm.SVR(kernel='poly', gamma=20, degree=3, coef0=r)
        regr.fit(X_train, y_train)
        train_scores.append(regr.score(X_train, y_train))
        test_scores.append(regr.score(X_test, y_test))
    ax = fig.add_subplot(1, 3, 3)
    ax.plot(rs, train_scores, label="Training score", marker='+')
    ax.plot(rs, test_scores, label="Testing score", marker='o')
    ax.set_title("SVR_poly_r gamma=20 degree=3")
    ax.set_xlabel(r"r")
    ax.set_ylabel("score")
    ax.set_ylim(-1, 1.)
    ax.legend(loc="best", framealpha=0.5)
    plt.show()

# Call test_SVR_poly
test_SVR_poly(X_train, X_test, y_train, y_test)
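For reference (this note is an addition, based on scikit-learn's documented kernel definitions): the polynomial kernel used above is K(x, x') = (gamma * <x, x'> + coef0) ** degree, so the three swept parameters p (degree), gamma and r (coef0) all enter the kernel directly, which is why each sweep can shift the training and testing curves noticeably.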
def test_SVR_rbf(*data):
    ''' Test how the gamma parameter affects the performance of SVR with an RBF (Gaussian) kernel '''
    X_train, X_test, y_train, y_test = data
    gammas = range(1, 20)
    train_scores = []
    test_scores = []
    for gamma in gammas:
        regr = svm.SVR(kernel='rbf', gamma=gamma)
        regr.fit(X_train, y_train)
        train_scores.append(regr.score(X_train, y_train))
        test_scores.append(regr.score(X_test, y_test))
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    ax.plot(gammas, train_scores, label="Training score", marker='+')
    ax.plot(gammas, test_scores, label="Testing score", marker='o')
    ax.set_title("SVR_rbf")
    ax.set_xlabel(r"$\gamma$")
    ax.set_ylabel("score")
    ax.set_ylim(-1, 1)
    ax.legend(loc="best", framealpha=0.5)
    plt.show()

# Call test_SVR_rbf
test_SVR_rbf(X_train, X_test, y_train, y_test)
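The sweep above only tries integer gamma values from 1 to 19. As a small additional comparison point (not in the original article), recent scikit-learn versions also accept gamma='scale', which is their default setting for the RBF kernel:

# Editor's sketch, not part of the original article: evaluate the RBF kernel with
# scikit-learn's gamma='scale' alongside the integer sweep above.
regr = svm.SVR(kernel='rbf', gamma='scale')
regr.fit(X_train, y_train)
print('RBF with gamma="scale", test score: %.2f' % regr.score(X_test, y_test))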
def test_SVR_sigmoid(*data):
    ''' Test how gamma and coef0 affect the performance of SVR with a sigmoid kernel '''
    X_train, X_test, y_train, y_test = data
    fig = plt.figure()
    ### Test gamma, with coef0 fixed at 0.01 ###
    gammas = np.logspace(-1, 3)
    train_scores = []
    test_scores = []
    for gamma in gammas:
        regr = svm.SVR(kernel='sigmoid', gamma=gamma, coef0=0.01)
        regr.fit(X_train, y_train)
        train_scores.append(regr.score(X_train, y_train))
        test_scores.append(regr.score(X_test, y_test))
    ax = fig.add_subplot(1, 2, 1)
    ax.plot(gammas, train_scores, label="Training score", marker='+')
    ax.plot(gammas, test_scores, label="Testing score", marker='o')
    ax.set_title("SVR_sigmoid_gamma r=0.01")
    ax.set_xscale("log")
    ax.set_xlabel(r"$\gamma$")
    ax.set_ylabel("score")
    ax.set_ylim(-1, 1)
    ax.legend(loc="best", framealpha=0.5)
    ### Test r (coef0), with gamma fixed at 10 ###
    rs = np.linspace(0, 5)
    train_scores = []
    test_scores = []
    for r in rs:
        regr = svm.SVR(kernel='sigmoid', coef0=r, gamma=10)
        regr.fit(X_train, y_train)
        train_scores.append(regr.score(X_train, y_train))
        test_scores.append(regr.score(X_test, y_test))
    ax = fig.add_subplot(1, 2, 2)
    ax.plot(rs, train_scores, label="Training score", marker='+')
    ax.plot(rs, test_scores, label="Testing score", marker='o')
    ax.set_title("SVR_sigmoid_r gamma=10")
    ax.set_xlabel(r"r")
    ax.set_ylabel("score")
    ax.set_ylim(-1, 1)
    ax.legend(loc="best", framealpha=0.5)
    plt.show()

# Call test_SVR_sigmoid
test_SVR_sigmoid(X_train, X_test, y_train, y_test)
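The functions above sweep each kernel parameter separately while holding the others fixed. As a complementary sketch (an addition, not from the original article; the parameter grid below is illustrative only), GridSearchCV can search several kernels and parameters jointly with cross-validation on the training split:

# Editor's sketch, not part of the original article: joint parameter search with
# cross-validation instead of separate one-at-a-time sweeps.
from sklearn.model_selection import GridSearchCV

param_grid = {
    'kernel': ['rbf', 'sigmoid'],
    'gamma': [0.01, 0.1, 1, 10],
    'C': [0.1, 1, 10],
}
search = GridSearchCV(svm.SVR(), param_grid, cv=5)
search.fit(X_train, y_train)
print('Best parameters:', search.best_params_)
print('Test score: %.2f' % search.score(X_test, y_test))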
That is all for this article. I hope it helps with your study, and thank you for your support.