Battleship

This is a game of Battleship between the user and the CPU.

Battleship

This is a game of battleship.

Rock, Paper, and Scissors

This is a game of rock, paper, and scissors between the CPU and the user. First one to win 10 rounds wins.

Simple Register Form

This is a registration form that asks for your first and last name, your birth year, and your favorite book, sport, and movie.

Guessing Game

This is a guessing game in which the user has to guess a number that the CPU is thinking of from 1 to 100 in 5 tries.

Calculator

This is a two-number calculator which adds, subtracts, multiplies, and divides two numbers (whole and decimal).

Rock, Paper, and Scissors

You play Rock, Paper, and Scissors 9 times with the CPU. After the 9 games, the CPU tells you if you beat the CPU or if you lost to it.

Battleship

You have 4 tries to sink the ship and win.

Logistic Regression

This is logistic regression implemented from scratch in Python.

x = [0, 1, 2, 3, 5, 6, 7, 8] y = [0, 0, 0, 0, 1, 1, 1, 1] import numpy as np import pandas as pd import matplotlib.pyplot as plt plt.plot(x, y) plt.savefig('plot.png') def slopeDerivative(x, y, slope, yInt): length = len(x) total = 0 for i in range(length): predicted = (slope * x[i]) + yInt difference = (y[i] - predicted) * x[i] total += difference returnValue = (-2/length) * total return returnValue def interceptDerivative(x, y, slope, yInt): length = len(x) total = 0 for i in range(length): predicted = (slope * x[i]) + yInt difference = (y[i] - predicted) total += difference returnValue = (-2/length) * total return returnValue m = 0 c = 0 l = 0.0001 iterations = 100000 for i in range(iterations): derivativeSlope = slopeDerivative(x, y, m ,c) derivativeIntercept = interceptDerivative(x, y, m ,c) m = m - (l * derivativeSlope) print (m) c = c - (l * derivativeIntercept) m = round(m, 3) c = round(c, 3) print ("The slope is " + str(m) + " and the y - intercept = " + str(c)) newY = [] biggest = [] howBigIsTheTumor = float(input("How big is the tumor in cm? ")) userPrediction = 1 / (1 + (2.71828 ** (-1 * ((howBigIsTheTumor*m)+c)))) print ("There is a " + str(userPrediction) + " that the tumor is malignant!") for f in range(0, 8): xValues = [] recentValue = f for i in range(10000): xValues.append(recentValue) recentValue += 0.0001 for i in range(len(xValues)): yprime = 1 / (1 + (2.71828 ** (-1 * ((xValues[i]*m)+c)))) newY.append(yprime) for n in range(len(xValues)): biggest.append(xValues[n]) plt.plot(biggest, newY) plt.savefig("graph")

Linear Regression with Multiple Variables


x = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] y = [1, 4, 9, 16, 25, 36, 49, 64, 81, 100] z = [1, 8, 27, 64, 125, 216, 343, 512, 729, 1000] def SlopeD1(x, y, z, slope1, slope2, yInt): length = len(x) total = 0 for i in range(length): predicted = (slope1 * x[i]) + (slope2 * y[i]) + yInt difference = (z[i] - predicted) * x[i] total += difference returnValue = (-2/length) * total return returnValue def SlopeD2(x, y, z, slope1, slope2, yInt): length = len(x) total = 0 for i in range(length): predicted = (slope1 * x[i]) + (slope2 * y[i]) + yInt difference = (z[i] - predicted) * y[i] total += difference returnValue = (-2/length) * total return returnValue def InterceptD(x, y, z, slope1, slope2, yInt): length = len(x) total = 0 for i in range(length): predicted = (slope1 * x[i]) + (slope2 *y[1]) + yInt difference = (z[i] - predicted) total += difference returnValue = (-2/length) * total return returnValue m1 = 0 m2 = 0 c = 0 l = 0.000001 iterations = 99999 for i in range(iterations): mm1 = SlopeD1(x, y, z, m1, m2 ,c) mm2 = SlopeD2(x, y, z, m1, m2 ,c) yint = InterceptD(x, y, z, m1, m2 ,c) m1 = m1 - (l * mm1) m2 = m2 - (l * mm2) c = c - (l * yint) #print(m1, m2, c) print ('(', end ='' ) print(m1, end=', ') print(m2, end=', ') print(c, end=')')

Linear Regression


x = [1, 5, 3, 4, 7, 9, 12, 13, 15, 16, 17, 4, 5, 2, 10, 23, 25] y = [5, 12, 23, 14, 17, 8, 20, 21, 25, 38, 42, 10, 13, 7, 23, 50, 55] def SlopeD(x, y, slope, yInt): length = len(x) total = 0 for i in range(length): predicted = (slope * x[i]) + yInt difference = (y[i] - predicted) * x[i] total += difference returnValue = (-2/length) * total return returnValue def InterceptD(x, y, slope, yInt): length = len(x) total = 0 for i in range(length): predicted = (slope * x[i]) + yInt difference = (y[i] - predicted) total += difference returnValue = (-2/length) * total return returnValue m = 0 c = 0 l = 0.000001 iterations = 99999 for i in range(iterations): slope = SlopeD(x, y, m ,c) intercept = InterceptD(x, y, m ,c) m = m - (l * slope) c = c - (l * intercept) print (m, c)

Linear Regression with any amount of variables

# Linear regression with any number of input variables, fitted by gradient
# descent. `dataSet` holds `hm` input rows (one per variable) followed by one
# outputs row; every row has `boo` entries.


def slopeDerivativeOne(dataSet, values, hm, boo, yIntercept, wanted):
    """Partial derivative of the MSE with respect to coefficient `wanted`.

    dataSet: hm input rows plus a final outputs row, each of length boo.
    values:  current coefficients, one per input row.
    Bug fix: the original looped over range(hm) (number of variables) instead
    of range(boo) (number of data points), so it only worked when hm == boo.
    """
    wantedValues = list(dataSet[wanted])
    outputs = dataSet[-1]
    total = 0.0
    for f in range(boo):
        predicted = yIntercept
        for i in range(len(values)):
            predicted += values[i] * dataSet[i][f]
        total += (outputs[f] - predicted) * wantedValues[f]
    return (-2 / boo) * total


def DerivativeOne(dataSet, values, hm, boo, yIntercept, wanted):
    """Partial derivative of the MSE with respect to the y-intercept.

    `wanted` is kept for signature compatibility; the intercept gradient does
    not depend on it. Debug print of the running total removed.
    """
    outputs = dataSet[-1]
    total = 0.0
    for f in range(boo):
        predicted = yIntercept
        for i in range(len(values)):
            predicted += values[i] * dataSet[i][f]
        total += outputs[f] - predicted
    return (-2 / boo) * total


def getCost(dataSet, values, hm, boo, yIntercept, wanted):
    """Return the absolute total residual — a crude convergence measure."""
    outputs = dataSet[-1]
    total = 0.0
    for f in range(boo):
        predicted = yIntercept
        for i in range(len(values)):
            predicted += values[i] * dataSet[i][f]
        total += outputs[f] - predicted
    return abs(total)


def main():
    hm = int(input("Enter how many categories or inputs do you want: "))
    boo = int(input("How many inputs per category do you want: "))

    dataSet = []
    for be in range(hm):
        print("This is your " + str(be) + "th input set.")
        dataSet.append([float(input("Enter value: ")) for _ in range(boo)])

    outputs = []
    for _ in range(boo):
        print("Enter you output.")
        outputs.append(float(input("Enter value: ")))
    dataSet.append(outputs)

    values = [0.0] * hm
    yIntercept = 0.0
    l = 0.0001          # learning rate
    # Bug fix: the original declared iterations = 100000 but never used it —
    # its while-True loop ran 1,000,000+ sweeps (with a dead `False`
    # statement before the break). Use the declared bound.
    iterations = 100000

    for _ in range(iterations):
        for i in range(len(values)):
            d = slopeDerivativeOne(dataSet, values, hm, boo, yIntercept, i)
            values[i] -= l * d
        # Bug fix: update the intercept once per sweep, not once per
        # coefficient as the original did inside the inner loop.
        d1 = DerivativeOne(dataSet, values, hm, boo, yIntercept, 0)
        yIntercept -= l * d1

    for v in values:
        print(round(v, 2))
    print("y-intercept" + str(round(yIntercept, 2)))


if __name__ == "__main__":
    main()
1 2