# -*- coding: utf-8 -*-
"""Day 10_ridge-regression_gradient_descent.ipynb

Automatically generated by Colaboratory.

Original file is located at
"""

from google.colab import drive
drive.mount('/gdrive')
PATH = "/gdrive/My Drive/Colab Notebooks/resources/"

# %matplotlib inline
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
from sklearn import linear_model
from sklearn.metrics import mean_squared_error
matplotlib.rc('xtick', labelsize=14)
matplotlib.rc('ytick', labelsize=14)

def generate_data(n):
    # 100-dimensional data in which only the first 10 coordinates are relevant
    d = 100
    w = np.zeros(d)
    for i in range(0, 10):
        w[i] = 1.0
    # Standard-normal features plus standard-normal label noise
    trainx = np.random.normal(size=(n, d))
    e = np.random.normal(size=n)
    trainy = np.dot(trainx, w) + e
    return trainx, trainy

def ridge_regression_GD(x, y, C):
    # Gradient descent on the ridge objective
    #   L(w, b) = ||y - (x.w + b)||^2 + C ||w||^2,
    # where b is a single scalar bias (shared by all points, not penalized).
    w = np.zeros(x.shape[1])
    b = 0.0
    losses = []
    eta = 0.001  # step size (learning rate)
    deriv_w = -2 * (y - (x.dot(w) + b)).dot(x) + 2 * C * w
    # The gradient norm is never negative, so a single threshold suffices
    while np.linalg.norm(deriv_w) > 0.15:
        residual = y - (x.dot(w) + b)
        deriv_w = -2 * residual.dot(x) + 2 * C * w
        deriv_b = -2 * residual.sum()
        w = w - eta * deriv_w
        b = b - eta * deriv_b
        losses.append(residual.dot(residual) + C * w.dot(w))
    return w, b, losses

# Generate 200 data points
n = 200
x, y = generate_data(n)
# Set regularization constant
C = 10.0
# Run gradient descent solver
w, b, losses = ridge_regression_GD(x, y, C)
# Plot the losses
plt.plot(losses, 'r')
plt.xlabel('Iterations', fontsize=14)
plt.ylabel('Loss', fontsize=14)
plt.show()

def compute_mse(w, b, x, y):
    residuals = y - (np.dot(x, w) + b)
    return np.dot(residuals, residuals) / len(y)

# Generate 200 data points
n = 200
x, y = generate_data(n)
# Set regularization constant
C = 10.0
# Run gradient descent solver and compute its MSE
w, b, losses = ridge_regression_GD(x, y, C)
# Use built-in routine for ridge regression and compute MSE
regr = linear_model.Ridge(alpha=C)
regr.fit(x, y)
# Print MSE values and L2 distance between the regression functions
print("MSE of gradient descent solver: ", compute_mse(w, b, x, y))
print("MSE of built-in solver: ", mean_squared_error(regr.predict(x), y))
print("Distance between w-coefficients: ", np.linalg.norm(w - regr.coef_))
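As a sanity check on the gradient-descent solver, note that ridge regression with an unpenalized bias also has a closed-form solution: setting the gradient of the same objective to zero gives b = mean(y) - mean(x)·w, while w solves the normal equations on the centered data. The sketch below is my addition, not part of the original notebook, and ridge_closed_form is just an illustrative helper name; its output should land close to both the gradient-descent solution above and sklearn's Ridge (which likewise leaves the intercept unpenalized).

# A cross-check sketch (not from the original notebook): exact ridge solution
# obtained by centering the data so the scalar bias drops out of the problem.
def ridge_closed_form(x, y, C):  # hypothetical helper, for verification only
    x_mean, y_mean = x.mean(axis=0), y.mean()
    xc, yc = x - x_mean, y - y_mean
    d = x.shape[1]
    # Solve the ridge normal equations (Xc^T Xc + C I) w = Xc^T yc
    w = np.linalg.solve(xc.T.dot(xc) + C * np.eye(d), xc.T.dot(yc))
    b = y_mean - x_mean.dot(w)
    return w, b

w_cf, b_cf = ridge_closed_form(x, y, C)
print("Distance between GD and closed-form w: ", np.linalg.norm(w - w_cf))

Tightening the gradient-norm threshold (0.15 above) should shrink this distance further, at the cost of more iterations; the threshold simply trades accuracy against runtime.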