• Python: implementing the logistic regression algorithm (complete source code included)



    import numpy as np
    from matplotlib import pyplot as plt
    from sklearn import datasets
    
    def sigmoid_function(z):
        return 1 / (1 + np.exp(-z))
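    # Note: np.exp(-z) can overflow for large negative z and raise
    # RuntimeWarnings. If SciPy is available, scipy.special.expit is a
    # numerically stable drop-in replacement for this function.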
    
    
    def cost_function(h, y):
        return (-y * np.log(h) - (1 - y) * np.log(1 - h)).mean()
    
    
    def log_likelihood(X, Y, weights):
        scores = np.dot(X, weights)
        return np.sum(Y * scores - np.log(1 + np.exp(scores)))
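    # Note: log_likelihood is kept for reference but never called below.
    # Maximizing it is equivalent to minimizing cost_function, which is the
    # mean negative log-likelihood of the same model.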
    
    
    # alpha is the learning rate, X is the feature matrix, and y is the target vector
    def logistic_reg(alpha, X, y, max_iterations=70000):
        theta = np.zeros(X.shape[1])
    
        for iteration in range(max_iterations):
            z = np.dot(X, theta)
            h = sigmoid_function(z)
            # gradient of the mean cross-entropy cost: X^T (h - y) / m
            gradient = np.dot(X.T, h - y) / y.size
            theta = theta - alpha * gradient  # gradient-descent update of the weights
            if iteration % 100 == 0:
                # report the loss every 100 iterations
                J = cost_function(sigmoid_function(np.dot(X, theta)), y)
                print(f"loss: {J}")
        return theta
    
    
    
    if __name__ == "__main__":
        iris = datasets.load_iris()
        X = iris.data[:, :2]  # use only the first two features so the boundary can be plotted in 2-D
        y = (iris.target != 0) * 1  # binarize the target: 0 for class 0 (setosa), 1 for the rest
    
        alpha = 0.1
        theta = logistic_reg(alpha, X, y, max_iterations=70000)
        print("theta: ", theta)  # printing the theta i.e our weights vector
    
        def predict_prob(X):
            # probability of class 1 predicted by the fitted model
            return sigmoid_function(np.dot(X, theta))
    
        plt.figure(figsize=(10, 6))
        plt.scatter(X[y == 0][:, 0], X[y == 0][:, 1], color="b", label="0")
        plt.scatter(X[y == 1][:, 0], X[y == 1][:, 1], color="r", label="1")
        (x1_min, x1_max) = (X[:, 0].min(), X[:, 0].max())
        (x2_min, x2_max) = (X[:, 1].min(), X[:, 1].max())
        (xx1, xx2) = np.meshgrid(np.linspace(x1_min, x1_max), np.linspace(x2_min, x2_max))
        grid = np.c_[xx1.ravel(), xx2.ravel()]
        probs = predict_prob(grid).reshape(xx1.shape)
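        # the 0.5 probability contour is where np.dot(x, theta) == 0, i.e. the decision boundary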
        plt.contour(xx1, xx2, probs, [0.5], linewidths=1, colors="black")
    
        plt.legend()
        plt.show()
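
    As a sanity check, the result can be compared against scikit-learn's LogisticRegression on the same data. The sketch below is an illustrative addition, not part of the original script: it continues from the X, y, and predict_prob defined above, disables the intercept (the implementation above has no bias column) and regularization, and compares predicted labels rather than raw coefficients, since this two-feature problem is linearly separable and unregularized weight magnitudes are therefore not unique. The penalty=None spelling assumes scikit-learn >= 1.2; older releases use penalty="none".

    from sklearn.linear_model import LogisticRegression

    # unregularized, no intercept, to match the hand-rolled model above
    clf = LogisticRegression(fit_intercept=False, penalty=None, max_iter=10000)
    clf.fit(X, y)

    # fraction of points on which the two models agree (should be close to 1.0)
    agreement = np.mean((predict_prob(X) >= 0.5) == clf.predict(X))
    print("prediction agreement with sklearn:", agreement)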
    
    
  • Original article: https://blog.csdn.net/it_xiangqiang/article/details/126063000