The training method used in this article is Newton's method (Newton Method).
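Concretely, with \(p_i = \sigma(w^\top x_i) = \frac{1}{1 + e^{-w^\top x_i}}\), each Newton step uses the gradient and Hessian of the negative log-likelihood,
\[
\nabla L(w) = -\sum_i (y_i - p_i)\,x_i, \qquad
H(w) = \sum_i p_i (1 - p_i)\,x_i x_i^\top,
\]
and updates the weights by \(w \leftarrow w - H(w)^{-1}\,\nabla L(w)\). These are exactly the quantities computed by diff, hess_mat, and newton_method in the code below.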
Code
import numpy as np


class LogisticRegression(object):
    """Logistic Regression classifier trained by Newton's method."""

    def __init__(self, error: float = 0.7, max_epoch: int = 100):
        """
        :param error: float, training stops once the distance between the new
            weight vector and the old one is less than error.
        :param max_epoch: training stops once the number of epochs reaches
            max_epoch.
        """
        self.error = error
        self.max_epoch = max_epoch
        self.weight = None
        # Threshold P(y=1 | x) at 0.5 to produce 0/1 labels.
        self.sign = np.vectorize(lambda x: 1 if x >= 0.5 else 0)

    def p_func(self, X_):
        """Get P(y=1 | x).

        :param X_: shape = (n_samples, n_features + 1)
        :return: shape = (n_samples,)
        """
        tmp = np.exp(self.weight @ X_.T)
        return tmp / (1 + tmp)

    def diff(self, X_, y, p):
        """Get the first derivative (gradient).

        :param X_: shape = (n_samples, n_features + 1)
        :param y: shape = (n_samples,)
        :param p: shape = (n_samples,), P(y=1 | x)
        :return: shape = (n_features + 1,)
        """
        return -(y - p) @ X_

    def hess_mat(self, X_, p):
        """Get the Hessian matrix (second derivative).

        :param X_: shape = (n_samples, n_features + 1)
        :param p: shape = (n_samples,), P(y=1 | x)
        :return: shape = (n_features + 1, n_features + 1)
        """
        hess = np.zeros((X_.shape[1], X_.shape[1]))
        for i in range(X_.shape[0]):
            hess += self.X_XT[i] * p[i] * (1 - p[i])
        return hess

    def newton_method(self, X_, y):
        """Newton's method for computing the weight vector.

        :param X_: shape = (n_samples, n_features + 1)
        :param y: shape = (n_samples,)
        :return: None
        """
        self.weight = np.ones(X_.shape[1])
        # Precompute the outer products x_i x_i^T so each Hessian
        # evaluation only needs to rescale and sum them.
        self.X_XT = []
        for i in range(X_.shape[0]):
            t = X_[i, :].reshape((-1, 1))
            self.X_XT.append(t @ t.T)

        for _ in range(self.max_epoch):
            p = self.p_func(X_)
            diff = self.diff(X_, y, p)
            hess = self.hess_mat(X_, p)
            # Newton update: w <- w - H^{-1} * gradient
            new_weight = self.weight - (np.linalg.inv(hess) @ diff.reshape((-1, 1))).flatten()

            if np.linalg.norm(new_weight - self.weight) <= self.error:
                break

            self.weight = new_weight

    def fit(self, X, y):
        """
        :param X: shape = (n_samples, n_features)
        :param y: shape = (n_samples,)
        :return: self
        """
        # Prepend a column of ones so the bias is folded into the weights.
        X_ = np.c_[np.ones(X.shape[0]), X]
        self.newton_method(X_, y)
        return self

    def predict(self, X) -> np.ndarray:
        """
        :param X: shape = (n_samples, n_features)
        :return: shape = (n_samples,)
        """
        X_ = np.c_[np.ones(X.shape[0]), X]
        return self.sign(self.p_func(X_))
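A quick sanity check of the classifier on its own (this snippet is not from the original article; the two overlapping Gaussian blobs and the tolerance below are made up purely for illustration):

import numpy as np

# Hypothetical toy data: two overlapping 2-D Gaussian blobs (illustration only).
rng = np.random.RandomState(0)
X = np.r_[rng.randn(50, 2) + 0.5, rng.randn(50, 2) - 0.5]
y = np.r_[np.ones(50), np.zeros(50)]

# Use a tight stopping tolerance so Newton's method runs to convergence
# rather than stopping early under the loose default of error=0.7.
clf = LogisticRegression(error=1e-6, max_epoch=100)
clf.fit(X, y)

print(clf.weight)                     # learned weights: [bias, w1, w2]
print((clf.predict(X) == y).mean())   # training accuracy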
Test code
import numpy as np
import matplotlib.pyplot as plt
import sklearn.datasets


def plot_decision_boundary(pred_func, X, y, title=None):
    """Plot the training samples and the classifier's decision boundary.

    :param pred_func: the classifier's predict function
    :param X: training set X
    :param y: training set y
    :return: None
    """
    # Set min and max values and give them some padding.
    x_min, x_max = X[:, 0].min() - .5, X[:, 0].max() + .5
    y_min, y_max = X[:, 1].min() - .5, X[:, 1].max() + .5
    h = 0.01
    # Generate a grid of points with distance h between them.
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
    # Predict the function value for the whole grid.
    Z = pred_func(np.c_[xx.ravel(), yy.ravel()])
    Z = Z.reshape(xx.shape)
    # Plot the contour and the training examples.
    plt.contourf(xx, yy, Z, cmap=plt.cm.Spectral)
    plt.scatter(X[:, 0], X[:, 1], s=40, c=y, cmap=plt.cm.Spectral)
    if title:
        plt.title(title)
    plt.show()
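The article does not show the driver that generates the data and draws the result figure; below is a minimal sketch of one possible driver, using sklearn.datasets.make_moons as a stand-in dataset (an assumption, not the article's actual test data), and assuming the LogisticRegression class and plot_decision_boundary above are already defined:

import sklearn.datasets

# Hypothetical driver: make_moons is a stand-in for the unseen original data.
X, y = sklearn.datasets.make_moons(n_samples=200, noise=0.3, random_state=0)
clf = LogisticRegression(error=1e-6, max_epoch=100).fit(X, y)
plot_decision_boundary(clf.predict, X, y,
                       title="Logistic Regression (Newton Method)")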
Result

[Figure: decision boundary plotted by plot_decision_boundary on the training data]
For more machine learning code, visit https://github.com/WiseDoge/plume.

That concludes this walkthrough of implementing logistic regression (Logistic Regression) with Newton's method in Python; for more material on logistic regression in Python, see the other related articles.