# Gradient Descent


Linear Regression Using Gradient Descent

This is 𝓛𝓲𝓷𝓮𝓪𝓻 𝓡𝓮𝓰𝓻𝓮𝓼𝓼𝓲𝓸𝓷 𝓤𝓼𝓲𝓷𝓰 𝓖𝓻𝓪𝓭𝓲𝓮𝓷𝓽 𝓓𝓮𝓼𝓬𝓮𝓷𝓽, optimized to run in about 50 milliseconds (averaged over 12 runs).
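
For reference, these are the hypothesis and the update rules the code below implements, with learning rate α (`learning_rate` in the code) and m training examples:

```latex
h_\theta(x) = \theta_0 x + \theta_1

\theta_0 \leftarrow \theta_0 - \alpha \,\frac{1}{m} \sum_{i=1}^{m} \bigl(h_\theta(x_i) - y_i\bigr)\, x_i

\theta_1 \leftarrow \theta_1 - \alpha \,\frac{1}{m} \sum_{i=1}^{m} \bigl(h_\theta(x_i) - y_i\bigr)
```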

""" @nexclap/AdamBlumenfeld """ # Imports import numpy as np from random import randint from matplotlib import pyplot as plt # Define Style Of Matplotlib Graphs plt.style.use("ggplot") # Define data X = np.array([1, 3, 5, 6, 8, 10, 11, 18, 19, 20, 24, 26, 30, 32, 36, 38, 39, 40, 43, 46, 52, 55, 56, 58, 59]) y = np.array([3, 4, 5, 7, 8, 9, 10, 12, 14, 15, 21, 36, 37, 38, 39, 40, 43, 46, 49, 51, 54, 56, 58, 60, 69]) # Plot data plt.scatter(X, y) plt.show() #Regressor Class class Regressor: # Training Function def fit(self, X, y, learning_rate=0.00001, converge=0.001, cst=False): # Cst is weather or not to make a history of cost for further analysis self.cst_b = cst if cst: self.cst = [[], []] # Dataset self.X = X self.y = y # Learning rate, or "a" in the gradient decent formula self.learning_rate = learning_rate # The M and B values in the hypothysis function self.theta = [0, 0] # Cost, which initialtes at infinity self.cost = float('inf') # The iterator of the gradient decent algorithm, mine is recursive (Lol, I just had to add that flex) self.gradient_decent_step(converge) # isub for theta, basically saying theta -= (whatever), only for practical reasons, I had to make it a seprete function def theta_isub(self, i, other): self.theta[i] -= other return self.theta[i] # Calculate and update (or store if cst is True) cost def _cost(self, iteration=None): # Cost function self.cost = (1/(2*len(X))*sum([(self.h(X[index]) - y[index])*X[index] for index in range(len(X))])**2) if self.cst_b: # Update cst self.cst[0].append(self.cost) self.cst[1].append(iteration) # Hypothesis function def h(self, x): # h_θ(x) = θ₁ + θ₀x (Yes, I know that in my hypothysis function is switched around) return x*self.theta[0] + self.theta[1] # Gradient decent iterator def gradient_decent_step(self, converge, iteration=1): # Base case: if the cost is less than the set convergence point than accept current theata values if self.cost <= converge: return None # Do one iteration of gradient decent self._step() # Compute cost self._cost(iteration) return self.gradient_decent_step(converge, iteration+1) # All the math of gradient decent, (Now you know why I made the theta_isub function) def _step(self): return [self.theta_isub(0, self.learning_rate * (1/len(X)*sum([(self.h(X[index]) - y[index])*X[index] for index in range(len(X))]))),self.theta_isub(1, self.learning_rate * (1/len(X)*sum([self.h(X[index]) - y[index] for index in range(len(X))])))] # Define a model model = Regressor() # Train model (With cst = True for graphing) model.fit(X, y, cst=True) # Get the theta (M and B values) and the cst variable (or history of cost to iterations) theta = model.theta cst = model.cst # Nerd plot stuff (Plot linear regression graph) x = np.linspace(0,60,100) y1 = theta[0]*x+theta[1] plt.title("Linear Regression") plt.scatter(X, y, c='teal') plt.plot(x, y1) #plt.savefig("linear_regression.png") (Saves graph to file) plt.show() # More nerd plot stuf (Plot cost graph (cst)) plt.title("Cost") plt.plot(cst[1], cst[0]) #plt.savefig("cost.png") (Saves graph to file) plt.show()