#SiliconValley4u


Python --> Java : The Guessing Game

This is my first project in Java. It is a syntax-practice project that takes the guessing game I made in Python and recreates it in Java.


import java.util.Random;
import java.util.Scanner;

class GuessingGame {
    public static void main(String[] args) {
        /* generate a random number between 0 and 99 */
        Random number = new Random();
        int num = number.nextInt(100);
        System.out.println(num); /* prints the answer (useful while testing) */

        /* keep reading guesses on one Scanner until the user is correct */
        Scanner guess = new Scanner(System.in);
        while (true) {
            System.out.print("Enter a number value: ");
            int t = guess.nextInt(); /* user input */
            if (t == num) {
                System.out.println("you are correct");
                break;
            }
        }
        guess.close();
    }
}
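
The original Python version is not included in this post; below is a minimal sketch of what it might have looked like (the names and prompts here are assumptions, not the original code):

# Rough sketch of the Python guessing game this project was ported from
# (the original isn't shown above, so the details here are assumptions)
import random

num = random.randint(0, 99)  # secret number
while True:
    guess = int(input("Enter a number value: "))
    if guess == num:
        print("you are correct")
        break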

K Means Clustering

K Means Clustering algorithm with a fixed value for k. Takes in a set of data points and groups them into k effective clusters.

""" K Means Clustering By: Kush Arora (June 4,2019) """ ##Imports import pandas as pd import numpy as np import matplotlib.pyplot as plt from math import sqrt ##data df=pd.DataFrame({ "x":[12,20,28,18,29,33,24], "y":[39,36,30,52,54,46,55] }) np.random.seed(200) # of clusters k=3 #global variables l=[] c=[] red=[] blue=[] green=[] ##distance formula def d_formula(x1,x2,y1,y2): return sqrt((x2-x1)**2 + (y2-y1)**2) ##list of centroids centroids=[] for i in range(0,k): centroids.append((np.random.randint(0,80),np.random.randint(0,80))) #formatting fig= plt.figure(figsize=(5,5)) #plotting all the data points plt.scatter(df['x'], df['y'],color='k') #coloring the centroids colmap=[(1,'r'),(2,'g'),(3,'b')] #plotting the centroids for i in range (0,k): a=centroids[i] b=colmap[i] plt.scatter(a[0], a[1], color=b[1]) def c_point(centroids,data): global l for x in centroids: xCoords= df['x'] yCoords=df['y'] for y in range(0,len(yCoords)): var=d_formula(x[0],xCoords[y],x[1],yCoords[y]) l.append(var) def find_nearest(l,k): #list l --> list of distances from points to centroids three=[] global red, blue, green if int(len(l)/k) != 0: for value in range(len(l)-1,-1,-(int(len(l)/k))): three.append(l[value]) #removing the three values after appending to another list l.pop(value) #separates to closest value if three != [] and l != []: if min(three) == three[0]: red.append(min(three)) if min(three) == three[1]: blue.append(min(three)) else: green.append(min(three)) return find_nearest(l,k) ##establishing length of x&y axis plt.xlim(0,80) plt.ylim(0,80) #shows graph plt.show() #running functions c_point(centroids,df) find_nearest(l,k) print("1rst Cluster Distances:",red) print("2nd Cluster Distances:",blue) print("3rd Cluster Distances:",green)

Implementation of the KNN Algorithm

This code uses the concept of K Nearest Neighbors to classify a given point "C" as either red or blue based on its surrounding points.


""" By: Kush Arora (4/30/19) """ #imports from math import sqrt #prior generated information data= [(1,5,"red"), (10,10,"blue"), (15, 3,"red"), (23,9, "blue"), (21, 2, "red"), (18, 34, "blue"), (3, 24, "red"), (14, 41, "blue"), (13, 10,"red"), (4,20,"blue"), (13, 10,"red"), (9, 11,"blue"), (13, 3,"red")] #defining variables listA=[] count_red=0 count_blue=0 def distance(x1, x2, y1, y2): return sqrt ((x2-x1)**2+(y2-y1)**2) def knn(x,y, k=0): return distance_two(x,y) def distance_two(x,y): a=[] d= data return d_recur(x,y,a,d) def d_recur(x,y,a,d): if len(d) == 0: return a else: current=d[0] dist= distance(current[0], x, current[1], y) current = current + (dist,) a.append(current) d.pop(0) return d_recur(x, y, a, d) x=knn(1,2,k=3) for i in range(0,3): #extracting the red/blue from the list z=x[i] yy=z[2] #checks to see what color chips r within the cirlce radius if yy=="red": count_red += 1 if yy=="blue": count_blue += 1 if count_red > count_blue: print("classified as red") if count_blue > count_red: print("classified as red")

KNN color sorting

This code uses the K Nearest Neighbors algorithm to classify a given point C as red or blue.

from math import sqrt

# Defining variables (x, y, color)
data = [(1, 2, 'red'), (2, 3, 'blue'), (3, 4, 'red'), (4, 5, 'blue'),
        (5, 6, 'red'), (6, 7, 'blue'), (7, 8, 'red'), (8, 9, 'blue'),
        (9, 10, 'red'), (10, 11, 'blue'), (11, 12, 'red'), (12, 13, 'blue'),
        (13, 14, 'red'), (14, 15, 'blue')]

xx = int(input("x coordinate of c: "))
yy = int(input("y coordinate of c: "))
kkk = int(input("value of k: "))

# Euclidean distance
def distance(x1, x2, y1, y2):
    return sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2)

def Knn(x, y, k=0):
    return distance_two(x, y)

def distance_two(x, y):
    a = []
    d = list(data)  # copy so the original data list is not consumed
    return d_recur(x, y, a, d)

# Recursively tag each data point with its distance to (x, y)
def d_recur(x, y, a, d):
    if len(d) == 0:
        return a
    else:
        current = d[0]
        dist = distance(x, current[0], current[1], y)
        current = current + (dist,)
        a.append(current)
        d.pop(0)
        return d_recur(x, y, a, d)

# Calling the functions
x = Knn(xx, yy, kkk)
# sort by distance so the first k entries really are the nearest neighbors
x.sort(key=lambda point: point[3])

# Categorizing by distance to c, and finding the color of each neighbor
count_red = 0
count_blue = 0
for i in range(0, kkk):
    z = x[i]
    color = z[2]
    if color == 'red':
        count_red += 1
    if color == 'blue':
        count_blue += 1

# Finding what color c is classified as
if count_red > count_blue:
    print('It is classified as red')
if count_blue > count_red:
    print('It is classified as blue')

K Nearest Neighbors Classification

This is K Nearest Neighbors classification without any machine learning libraries. For more information on the algorithm, see the following video:


""" K Nearest Classification Code By Adam Blumenfeld @nexclap.com/AdamBlumenfeld """ # Import square root function from built-in library math from math import sqrt # Define Data data = [(1, 5, "red"), (10, 10, "blue"), (1, 4, "red"), (15, 10 , "blue"), (15, 11 , "blue"), (15, 12 , "blue"), (2, 6, "red"), (0, 8, "red"), (1, 1, "red"), (1, 5, "red"), (12, 12 , "blue"), (15, 13 , "blue"), (11, 12 , "blue"), (15, 19 , "blue"), (16, 18 , "blue")] # Euclidean Distance def euclid(x1, x2, y1, y2): return sqrt((x2-x1)**2+(y2-y1)**2) # K nearest neigbors engine def Knn(x, y, classes=[],k=3): # find three closest points to point that we want to classify closest = bubblesort(distance(x, y))[:k] # dict with count count = {} # add classes to count ### example: ### count = { ### "red":0, ### "blue":0 ### } for _class in classes: count[_class] = 0 # Add all occurences to count for point in closest: count[point[2]] += 1 # find max number of occurences of class m = [0, None] for _class in count: if count[_class] > m[0]: m[0] = count[_class] m[1] = _class # Done! return m[1] # Bubblesort algorithm (thanks google!) def bubblesort(arr): for i in range(len(arr)): for j in range(len(arr) - i - 1): if arr[j][3] > arr[j + 1][3]: temp = arr[j] arr[j] = arr[j + 1] arr[j + 1] = temp return arr # Finds distance between each each point and the point we want to classify # # For instance: (1, 5, "red") might be 3.1622776601683795 units away # So the new point will be made (1, 5, "red", 3.1622776601683795) # def distance(x, y): a = [] d = data # start recursive process return d_recured(x, y, a, d) # recursive function def d_recured(x, y, a, d): # BASE CASE: If the length of the data list is equal to 0, return the finished list with the distances if len(d) == 0: return a else: # current point current = d[0] # distance from point to point we want to classify dist = euclid(x, current[0], y, current[1]) # concatonate initial tuple to distance # example: (1, 5, "red") ----> (1, 5, "red", 3.1622776601683795) current = current + (dist,) # append new point to list a.append(current) # delete first value from list d.pop(0) return d_recured(x, y, a, d) print(Knn(0, 2, classes=["red", "blue"], k=3))

Recursion

# Sum the digits of an integer
def sumDigit(num):
    l = len(list(str(num)))
    if l == 1:
        return num
    else:
        return num % 10 + sumDigit(int(num / 10))

z = sumDigit(678910)
print(z)

# Find the GCD of two numbers
def gcd(a, b):
    if b == 0:
        return a
    else:
        return gcd(b, a % b)

a = int(input("Enter first number:"))
b = int(input("Enter second number:"))
GCD = gcd(a, b)
print("GCD is:", GCD)
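
A quick way to sanity-check the two recursive functions above (a sketch that assumes both functions are already defined and skips the interactive input):

# Sanity checks (assumes sumDigit and gcd above are defined)
assert sumDigit(678910) == 31   # 6 + 7 + 8 + 9 + 1 + 0
assert gcd(48, 18) == 6         # gcd(48,18) -> gcd(18,12) -> gcd(12,6) -> gcd(6,0) -> 6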