#Silconvalley4u


Unstable Spiral (In Development)

Unstable Spiral is a legacy project dedicated to Howard Blumenfeld, author of Mental Architecture and mathematics professor at Las Positas College. Unstable Spiral was a website he worked on in college, and for his birthday I'm recreating it with a modern look. I'm using Bootstrap for the styling and PHP for the back end. The website's homepage is done, and I am currently working on database integration. This website is still in development.

WEBSITE URL: https://repl.it/@Snakebiking49/Unstable-Spiral

Optimized Code for One Split | Decision Tree Regression Without Sklearn

This is the optimized code for finding a single best split.
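Before the full notebook, here is the criterion in isolation: for a candidate split index, each side gets a centroid, and that side's spread is the standard deviation of every point's Euclidean distance to that centroid. A minimal sketch of the idea (the names side_spread and s are mine for illustration, not from the notebook):

import numpy as np

X = np.array([1, 1, 2, 1, 2, 5, 6, 5, 7, 5])
y = np.array([2, 3, 2, 3, 3, 6, 7, 6, 7, 7])

def side_spread(xs, ys):
    # Centroid of this side of the split
    cx, cy = xs.mean(), ys.mean()
    # Spread = std of each point's Euclidean distance to the centroid
    return np.std(np.hypot(xs - cx, ys - cy))

s = 5  # candidate split: points [0, 5) go left, [5, 10) go right
print(side_spread(X[:s], y[:s]), side_spread(X[s:], y[s:]))

The notebook scans all candidate split indices this way and reports the one with the smallest (left, right) deviation pair.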

# Jupyter Notebook

In [0]:

#Imports
from matplotlib import pyplot as plt
from math import sqrt
import numpy as np

# Data
X = np.array([1, 1, 2, 1, 2, 5, 6, 5, 7, 5])
y = np.array([2, 3, 2, 3, 3, 6, 7, 6, 7, 7])

# Plot data
plt.scatter(X, y, marker="o")
plt.show()

Out[0]: [scatter plot of the two point clusters]

In [1]:

# Euclidean distance
def distance(x1, x2, y1, y2):
    return sqrt((x2 - x1)**2 + (y2 - y1)**2)

# Sum (note: this shadows Python's built-in sum)
def sum(a):
    final = 0
    for i in a:
        final += i
    return final

# Standard deviation of each side's distances to its centroid
def deviate(xleft, xright, yleft, yright, centroids):
    distance1 = []
    distance2 = []
    lxc, lyc = centroids[0]
    rxc, ryc = centroids[1]
    for i in range(len(xleft)):  # range(0, len(xleft)-1) skipped the last point
        distance1.append(distance(lxc, xleft[i], lyc, yleft[i]))
    for i in range(len(xright)):
        distance2.append(distance(rxc, xright[i], ryc, yright[i]))
    return (np.std(distance1), np.std(distance2))

Out[1]: None

In [2]:

# OptSplit function: scores the first split, then hands off to the recursive scan
def opt(X, y, split=2, step=1, d=False, end=True):
    split = split + 1
    # Splits
    xleft, xright = X[:split], X[split:]
    yleft, yright = y[:split], y[split:]
    # Centroids of each side
    x1, x2 = sum(xleft)/len(xleft), sum(xright)/len(xright)
    y1, y2 = sum(yleft)/len(yleft), sum(yright)/len(yright)
    if d:
        # Plot split line
        plt.plot(np.array([split-1, split-1]), np.array([0, 10]))
    centers = [(x1, y1), (x2, y2)]
    std = deviate(xleft, xright, yleft, yright, centers)
    if d:
        # Plot data and centroids
        plt.scatter(X, y, marker="o")
        plt.scatter(np.array(centers[0][0]), np.array(centers[0][1]), marker="v")
        plt.scatter(np.array(centers[1][0]), np.array(centers[1][1]), marker="v")
        plt.show()
    # Call recursive function "regressor"
    return _opt(np.delete(X, 0), np.delete(y, 0), split+1, opt=(0, split), optdev=std,
                step=step, initX=X, inity=y, d=d, end=end)

# Recursive function
def _opt(X, y, split, opt=None, optdev=None, step=None, initX=None, inity=None, d=False, end=True):
    if len(X) < split+step:
        if end:
            print("DONE WITH RECURSIVE PROCESS")
            print(f"RESULTS: Optimal Split: ({opt[0]} -> {opt[1]-1}), Optimized Standard Deviation: left:{optdev[0]} right:{optdev[1]}")
            print("OPT SPLIT GRAPH:")
            xleft, xright = initX[:opt[1]], initX[opt[1]:]
            yleft, yright = inity[:opt[1]], inity[opt[1]:]
            x1, x2 = sum(xleft)/len(xleft), sum(xright)/len(xright)
            y1, y2 = sum(yleft)/len(yleft), sum(yright)/len(yright)
            centers = [(x1, y1), (x2, y2)]
            plt.plot(np.array([opt[1]-1, opt[1]-1]), np.array([0, 10]))
            plt.scatter(initX, inity, marker="o")
            plt.scatter(np.array(centers[0][0]), np.array(centers[0][1]), marker="v")
            plt.scatter(np.array(centers[1][0]), np.array(centers[1][1]), marker="v")
            plt.show()
        return (opt, optdev)
    # Splits
    xleft, xright = initX[:split], initX[split:]
    yleft, yright = inity[:split], inity[split:]
    # Centroids of each side
    x1, x2 = sum(xleft)/len(xleft), sum(xright)/len(xright)
    y1, y2 = sum(yleft)/len(yleft), sum(yright)/len(yright)
    if d:
        # Plot split line
        plt.plot(np.array([split-1, split-1]), np.array([0, 10]))
    centers = [(x1, y1), (x2, y2)]
    std = deviate(xleft, xright, yleft, yright, centers)
    # Keep the best split so far (tuples compare left std first, then right)
    if std < optdev:
        optdev = std
        opt = (0, split)
    if d:
        # Plot data and centroids
        plt.scatter(initX, inity, marker="o")
        plt.scatter(np.array(centers[0][0]), np.array(centers[0][1]), marker="v")
        plt.scatter(np.array(centers[1][0]), np.array(centers[1][1]), marker="v")
        plt.show()
    # Recurse to the next candidate split
    return _opt(np.delete(X, 0), np.delete(y, 0), split+1, opt=opt, optdev=optdev,
                step=step, initX=initX, inity=inity, d=d, end=end)

x = opt(X, y, split=4, d=False)

Out[2]: [plot of the data, the best split line, and both centroids]
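To watch every candidate split as the scan proceeds, the same function defined above can be rerun with debug plotting turned on; d=True draws the split line, the data, and both centroids at each step:

# Plots each candidate split before reporting the winner
best, dev = opt(X, y, split=2, d=True)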

Decision Tree Regression Without Sklearn

I finally finished the single-split optimization.


#Imports
from matplotlib import pyplot as plt
from math import sqrt
import numpy as np

# Data
X = np.array([1, 1, 2, 1, 2, 5, 6, 5, 7, 5])
y = np.array([2, 3, 2, 3, 3, 6, 7, 6, 7, 7])

# Euclidean distance
def distance(x1, x2, y1, y2):
    return sqrt((x2 - x1)**2 + (y2 - y1)**2)

# Sum (note: this shadows Python's built-in sum)
def sum(a):
    final = 0
    for i in a:
        final += i
    return final

# Standard deviation of each side's distances to its centroid
def deviate(xleft, xright, yleft, yright, centroids):
    distance1 = []
    distance2 = []
    lxc, lyc = centroids[0]
    rxc, ryc = centroids[1]
    for i in range(len(xleft)):  # range(0, len(xleft)-1) skipped the last point
        distance1.append(distance(lxc, xleft[i], lyc, yleft[i]))
    for i in range(len(xright)):
        distance2.append(distance(rxc, xright[i], ryc, yright[i]))
    return (np.std(distance1), np.std(distance2))

# Train function: scores the first split, then hands off to the recursive scan
def train(X, y, split=2, step=1):
    split = split + 1
    # Splits
    xleft, xright = X[:split], X[split:]
    yleft, yright = y[:split], y[split:]
    # Centroids of each side
    x1, x2 = sum(xleft)/len(xleft), sum(xright)/len(xright)
    y1, y2 = sum(yleft)/len(yleft), sum(yright)/len(yright)
    # Plot split line
    plt.plot(np.array([split-1, split-1]), np.array([0, 10]))
    centers = [(x1, y1), (x2, y2)]
    std = deviate(xleft, xright, yleft, yright, centers)
    # Plot data and centroids
    plt.scatter(X, y, marker="o")
    plt.scatter(np.array(centers[0][0]), np.array(centers[0][1]), marker="v")
    plt.scatter(np.array(centers[1][0]), np.array(centers[1][1]), marker="v")
    plt.show()
    # Call recursive function "regressor"
    return _train(np.delete(X, 0), np.delete(y, 0), split+1, opt=(0, split), optdev=std,
                  step=step, initX=X, inity=y)

# Recursive function
def _train(X, y, split, opt=None, optdev=None, step=None, initX=None, inity=None):
    if len(X) < split+step:
        print("DONE WITH RECURSIVE PROCESS")
        print(f"RESULTS: Optimal Split:{opt}, Optimized Standard Deviation: left:{optdev[0]} right:{optdev[1]}")
        return (opt, optdev)
    # Splits
    xleft, xright = initX[:split], initX[split:]
    yleft, yright = inity[:split], inity[split:]
    # Centroids of each side
    x1, x2 = sum(xleft)/len(xleft), sum(xright)/len(xright)
    y1, y2 = sum(yleft)/len(yleft), sum(yright)/len(yright)
    # Plot split line
    plt.plot(np.array([split-1, split-1]), np.array([0, 10]))
    centers = [(x1, y1), (x2, y2)]
    std = deviate(xleft, xright, yleft, yright, centers)
    # Keep the best split so far (this update was missing, so the scan always
    # returned the initial split; tuples compare left std first, then right)
    if std < optdev:
        optdev = std
        opt = (0, split)
    # Plot data and centroids
    plt.scatter(initX, inity, marker="o")
    plt.scatter(np.array(centers[0][0]), np.array(centers[0][1]), marker="v")
    plt.scatter(np.array(centers[1][0]), np.array(centers[1][1]), marker="v")
    plt.show()
    # Recurse to the next candidate split
    return _train(np.delete(X, 0), np.delete(y, 0), split+1, opt=opt, optdev=optdev,
                  step=step, initX=initX, inity=inity)

x = train(X, y, split=4)
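For anyone who wants to cross-check the result, a depth-1 tree from scikit-learn (the library this project deliberately avoids) should separate the same two clusters. Its criterion differs: it picks an x-value threshold minimizing squared error in y, rather than scanning index-ordered splits by distance-to-centroid deviation, so this is a sanity check rather than an exact comparison:

from sklearn.tree import DecisionTreeRegressor

# A depth-1 regression tree is a single split on the x value
reg = DecisionTreeRegressor(max_depth=1)
reg.fit(X.reshape(-1, 1), y)
print(reg.tree_.threshold[0])  # should land between the two clusters (around 3.5)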

Decision Tree Regression Without Sklearn

The main purpose of this program is to do decision tree regression without using any machine learning libraries (Keras, Sklearn, etc.). I'm currently building the regressor itself, using a recursive function to scan the candidate splits.


#Imports
from matplotlib import pyplot as plt
from math import sqrt
import numpy as np

# Data
X = np.array([1, 1, 2, 1, 2, 5, 6, 5, 7, 5])
y = np.array([2, 3, 2, 3, 3, 6, 7, 6, 7, 7])

# Euclidean distance
def distance(x1, x2, y1, y2):
    return sqrt((x2 - x1)**2 + (y2 - y1)**2)

# Sum (note: this shadows Python's built-in sum)
def sum(a):
    final = 0
    for i in a:
        final += i
    return final

# Standard deviation of each side's distances to its centroid
def deviate(xleft, xright, yleft, yright, centroids):
    distance1 = []
    distance2 = []
    lxc, lyc = centroids[0]
    rxc, ryc = centroids[1]
    for i in range(len(xleft)):  # range(0, len(xleft)-1) skipped the last point
        distance1.append(distance(lxc, xleft[i], lyc, yleft[i]))
    for i in range(len(xright)):
        distance2.append(distance(rxc, xright[i], ryc, yright[i]))
    return (np.std(distance1), np.std(distance2))

# Helper function that calculates the initial split and will call the recursive function
def train(X, y, split=2):
    # Splits
    xleft, xright = X[:split], X[split:]
    yleft, yright = y[:split], y[split:]
    # Centroids of each side
    x1, x2 = sum(xleft)/len(xleft), sum(xright)/len(xright)
    y1, y2 = sum(yleft)/len(yleft), sum(yright)/len(yright)
    # Plot split line
    plt.plot(np.array([split-1, split-1]), np.array([0, 10]))
    centers = [(x1, y1), (x2, y2)]
    std = deviate(xleft, xright, yleft, yright, centers)
    # Plot data and centroids
    plt.scatter(X, y, marker="o")
    plt.scatter(np.array(centers[0][0]), np.array(centers[0][1]), marker="v")
    plt.scatter(np.array(centers[1][0]), np.array(centers[1][1]), marker="v")
    plt.show()

# Recursive function (not implemented yet)
def _train(X, y, split, opt=None, optdev=None):
    pass

train(X, y, split=5)
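Since _train is still a stub, here is a minimal sketch of the recursive scan it is heading toward: try each candidate split index in turn and keep the lowest-deviation one. The names best_split and dev_at are illustrative, not part of the program above:

def best_split(dev_at, n, s=1, best=None):
    # dev_at(s) scores the split that puts points [0, s) left and [s, n) right
    if s >= n:
        return best  # base case: every candidate split has been scored
    score = dev_at(s)
    if best is None or score < best[1]:
        best = (s, score)  # keep the lowest-scoring split seen so far
    return best_split(dev_at, n, s + 1, best)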
Akhil Yeleswar Apr 11

nice work!
