#Silconvalley4u


Update For Code On Web Scraper

I accidentally forgot to include a filter for prices, here's the updated code.

""" Copyright Adam Blumenfeld All rights reserved """ # These are the variable you have to set Email = "email@example.com" Password = "examplepassword" # Imports import requests from bs4 import BeautifulSoup import smtplib import time import datetime import keyboard as k import sys wait = True def wait_false(): global wait wait = False k.add_hotkey("ctrl+g", lambda: wait_false()) # URLS you want to check URLS = ["https://www.amazon.com/Lysol-Bathroom-Cleaner-Island-Breeze/dp/B06ZZQSH77/ref=sr_1_5?dchild=1&keywords=lysol+spray&qid=1588110272&sr=8-5", "https://www.amazon.com/Lysol-Disinfectant-Spray-Morning-Breeze/dp/B01IAIMYF2/ref=sr_1_1?dchild=1&keywords=lysol+disinfectant&qid=1588133936&sr=8-1", "https://www.amazon.com/Scott-67805-Recycled-Bathroom-Tissue/dp/B0040ZOIZO/ref=pd_ybh_a_5?_encoding=UTF8&psc=1&refRID=Y73XTT0K4CR1PQMFPV3V", "https://www.amazon.com/AmScope-120X-1200X-M30-ABS-KT2-W-Beginner-Microscope/dp/B00GGY85EC/ref=sr_1_1?dchild=1&keywords=microscope&qid=1588207972&s=industrial&sr=1-1"] # Time you want to space out already made emails (If you want to never relese emails on an item again, set it to float('inf')) cooldown_time = 7200 headers = { "User-Agent": 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.113 Safari/537.36'} def check(URL, maillist): page = requests.get(URL, headers=headers) soup1 = BeautifulSoup(page.content, 'html.parser') soup = BeautifulSoup(soup1.prettify(), "html.parser") title = soup.find(id="productTitle").get_text().strip().split(',')[0] available = soup.find(id="availability") price = soup.find(id="priceblock_ourprice") if price != None: print("Product is available") send_mail(title, URL, maillist) return True if available == None: print("Product is available") send_mail(title, URL) return True available = available.findChild('span').get_text().strip()[0:15] if available == "Temporarily out": print("Product is available") send_mail(title, URL, maillist) return True 
print("Product isn't available") return False def send_mail(title, link, maillist): global email global password server = smtplib.SMTP('smtp.gmail.com', 587) server.ehlo() server.starttls() server.ehlo() server.login(email, password) subject = f'"{title}" is available!' body = f'Check the Amazon link to see availablility: {link}' msg = f"Subject: {subject}\n\n{body}" server.sendmail( email, maillist, msg ) print('Email Has Been Sent!') server.quit() wait = True cooldown = {} # The time you want between checks time_between_checks = 120 while True: print("Press CTRL+g to fast forward check") i = 0 while i < time_between_checks and wait: time.sleep(1) print("Time since last check:", i, "\\", time_between_checks, end="\r") i += 1 wait = True print("\n") for URL in URLS: time_ = datetime.datetime.now() print(f"Checking... <{time_}> <{URL}>\n") if cooldown.get(URL) != None: if time.time() - cooldown[URL] >= cooldown_time: del cooldown[URL] else: print(f"Cooldown time for URL <{URL}>: {cooldown_time-int((time.time() - cooldown[URL]) % 60)} seconds") continue availability = check(URL, sys.argv) if availability: cooldown[URL] = time.time() print("\n\n")

Web Scraper For Amazon Availability

This web scraper checks Amazon links by URL every two minutes, and emails you when those products are available. There is a cooldown of 2 hours (cooldown_time = 7200 seconds) for products that have been found available and emailed to you, but this cooldown only affects the particular item and not all of the URLs. By the way, these parameters are completely customizable (you can have your wait time be 5 minutes, your cooldown be infinite, etc.). I made this scraper to help people out with panic buying and the lowered availability of basic items. In my original program, I made an email called amazonbot100@gmail.com to carry out the emails. To set up an email with this program, here are the steps: 1. Create a new email (recommended, but not required) 2. Enable two-factor authentication for your account (this is required for the next step) 3. Set up App Passwords for your account and create a new one for your computer. 4. Paste the email address into the Email variable at the top, and paste the app password into the Password variable under that. 4.5. Search for "my user agent" on Google and paste the result into the User-Agent header 5. Run python -m pip install bs4 6. Run python -m pip install requests 7. Run python -m pip install keyboard Also, I understand that the code isn't the most efficient, especially towards the end, though I was in a rush, so don't judge me xD. Usage: python scraper.py email-to-send-1@example.com email-to-send-2@example.com email-to-send-3@example.com email-to-send-you-get-the-point@example.com

""" Copyright Adam Blumenfeld All rights reserved """ # These are the variable you have to set Email = "email@example.com" Password = "examplepassword" # Imports import requests from bs4 import BeautifulSoup import smtplib import time import datetime import keyboard as k import sys wait = True def wait_false(): global wait wait = False k.add_hotkey("ctrl+g", lambda: wait_false()) # URLS you want to check URLS = ["https://www.amazon.com/Lysol-Bathroom-Cleaner-Island-Breeze/dp/B06ZZQSH77/ref=sr_1_5?dchild=1&keywords=lysol+spray&qid=1588110272&sr=8-5", "https://www.amazon.com/Lysol-Disinfectant-Spray-Morning-Breeze/dp/B01IAIMYF2/ref=sr_1_1?dchild=1&keywords=lysol+disinfectant&qid=1588133936&sr=8-1", "https://www.amazon.com/Scott-67805-Recycled-Bathroom-Tissue/dp/B0040ZOIZO/ref=pd_ybh_a_5?_encoding=UTF8&psc=1&refRID=Y73XTT0K4CR1PQMFPV3V", "https://www.amazon.com/AmScope-120X-1200X-M30-ABS-KT2-W-Beginner-Microscope/dp/B00GGY85EC/ref=sr_1_1?dchild=1&keywords=microscope&qid=1588207972&s=industrial&sr=1-1"] # Time you want to space out already made emails (If you want to never relese emails on an item again, set it to float('inf')) cooldown_time = 7200 headers = { "User-Agent": 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.113 Safari/537.36'} def check(URL, maillist): page = requests.get(URL, headers=headers) soup1 = BeautifulSoup(page.content, 'html.parser') soup = BeautifulSoup(soup1.prettify(), "html.parser") title = soup.find(id="productTitle").get_text().strip().split(',')[0] available = soup.find(id="availability") if available == None: print("Product is available") send_mail(title, URL) return True available = available.findChild('span').get_text().strip()[0:15] if available == "Temporarily out": print("Product is available") send_mail(title, URL, maillist) return True print("Product isn't available") return False def send_mail(title, link, maillist): global email global password server = 
smtplib.SMTP('smtp.gmail.com', 587) server.ehlo() server.starttls() server.ehlo() server.login(email, password) subject = f'"{title}" is available!' body = f'Check the Amazon link to see availablility: {link}' msg = f"Subject: {subject}\n\n{body}" server.sendmail( email, maillist, msg ) print('Email Has Been Sent!') server.quit() wait = True cooldown = {} # The time you want between checks time_between_checks = 120 while True: print("Press CTRL+g to fast forward check") i = 0 while i < time_between_checks and wait: time.sleep(1) print("Time since last check:", i, "\\", time_between_checks, end="\r") i += 1 wait = True print("\n") for URL in URLS: time_ = datetime.datetime.now() print(f"Checking... <{time_}> <{URL}>\n") if cooldown.get(URL) != None: if time.time() - cooldown[URL] >= cooldown_time: del cooldown[URL] else: print(f"Cooldown time for URL <{URL}>: {cooldown_time-int((time.time() - cooldown[URL]) % 60)} seconds") continue availability = check(URL, sys.argv) if availability: cooldown[URL] = time.time() print("\n\n")

Unstable Spiral (In Development) STATUS

Unstable Spiral is a legacy project dedicated to Howard Blumenfeld, author of *Mental Architecture* and Mathematics professor at Las Positas College. Unstable Spiral was a website that he worked on in college, and for his birthday I'm recreating it with modern taste. I'm using Bootstrap for the styling and PHP for the back end. The website's homepage is done and I am currently working on database integration. This website is still in development.

WEBSITE URL: https://repl.it/@Snakebiking49/Unstable-Spiral

Optimized Code for One Split | Decision Tree Regression Without Sklearn

This is the optimized code to find one optimized split.

# Jupyter Notebook transcript, reformatted as a plain script.
# Cell [0]: imports, data, initial scatter plot.

# Imports
from matplotlib import pyplot as plt
from math import sqrt
import numpy as np

# Data
X = np.array([1, 1, 2, 1, 2, 5, 6, 5, 7, 5])
y = np.array([2, 3, 2, 3, 3, 6, 7, 6, 7, 7])

# Plot data
plt.scatter(X, y, marker="o")
plt.show()


# Cell [1]: helper functions.
# NOTE: the original also defined a hand-rolled sum() that shadowed the
# builtin; it behaved identically, so the builtin is used instead.

def distance(x1, x2, y1, y2):
    """Euclidean distance between (x1, y1) and (x2, y2)."""
    return sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2)


def deviate(xleft, xright, yleft, yright, centroids):
    """Standard deviation of point-to-centroid distances for each side.

    centroids is [(left_cx, left_cy), (right_cx, right_cy)].
    Returns a (left_std, right_std) tuple.
    """
    lxc, lyc = centroids[0]
    rxc, ryc = centroids[1]
    # BUG FIX: the original looped over range(0, len(...) - 1), silently
    # excluding the last point of each side from the deviation.
    distance1 = [distance(lxc, px, lyc, py) for px, py in zip(xleft, yleft)]
    distance2 = [distance(rxc, px, ryc, py) for px, py in zip(xright, yright)]
    return (np.std(distance1), np.std(distance2))


# Cell [2]: the split optimizer.

def opt(X, y, split=2, step=1, d=False, end=True):
    """Evaluate the first candidate split, then recurse over the rest.

    split: index of the first candidate split; step: spacing knob forwarded
    to the recursion; d: debug-plot every candidate; end: print and plot the
    winning split when the recursion finishes.
    Returns the (opt, optdev) pair produced by _opt.
    """
    split = split + 1
    # Partition the data at the candidate index.
    xleft, xright = X[:split], X[split:]
    yleft, yright = y[:split], y[split:]
    # Centroid of each side.
    x1, x2 = sum(xleft) / len(xleft), sum(xright) / len(xright)
    y1, y2 = sum(yleft) / len(yleft), sum(yright) / len(yright)
    if d:
        # plot split
        plt.plot(np.append(split - 1, split - 1), np.array([0, 10]))
    centers = [(x1, y1), (x2, y2)]
    std = deviate(xleft, xright, yleft, yright, centers)
    if d:
        # Plot data and the two centroids.
        plt.scatter(X, y, marker="o")
        plt.scatter(np.array(centers[0][0]), np.array(centers[0][1]), marker="v")
        plt.scatter(np.array(centers[1][0]), np.array(centers[1][1]), marker="v")
        plt.show()
    # Call recursive function "regressor"
    return _opt(np.delete(X, 0), np.delete(y, 0), split + 1, opt=(0, split),
                optdev=std, step=step, initX=X, inity=y, d=d, end=end)


def _opt(X, y, split, opt=None, optdev=None, step=None, initX=None,
         inity=None, d=False, end=True):
    """Recursively score the remaining candidate splits, keeping the best.

    Returns (opt, optdev): the winning (start, end) split indices and its
    per-side standard deviations.
    """
    if len(X) < split + step:
        # Base case: no candidates left.
        if end:
            print("DONE WITH RECURSIVE PROCESS")
            print(f"RESULTS: Optimal Split: ({opt[0]} -> {opt[1]-1}), Optimized Standard Deviation: left:{optdev[0]} right:{optdev[1]}")
            print("OPT SPLIT GRAPH:")
            xleft, xright = initX[:opt[1]], initX[opt[1]:]
            yleft, yright = inity[:opt[1]], inity[opt[1]:]
            x1, x2 = sum(xleft) / len(xleft), sum(xright) / len(xright)
            y1, y2 = sum(yleft) / len(yleft), sum(yright) / len(yright)
            centers = [(x1, y1), (x2, y2)]
            plt.plot(np.append(opt[1] - 1, opt[1] - 1), np.array([0, 10]))
            plt.scatter(initX, inity, marker="o")
            # plot centers
            plt.scatter(np.array(centers[0][0]), np.array(centers[0][1]), marker="v")
            plt.scatter(np.array(centers[1][0]), np.array(centers[1][1]), marker="v")
            plt.show()
        return (opt, optdev)
    # Splits
    xleft, xright = initX[:split], initX[split:]
    yleft, yright = inity[:split], inity[split:]
    # Calculate centroids of each side
    x1, x2 = sum(xleft) / len(xleft), sum(xright) / len(xright)
    y1, y2 = sum(yleft) / len(yleft), sum(yright) / len(yright)
    if d:
        # plot split
        plt.plot(np.append(split - 1, split - 1), np.array([0, 10]))
    centers = [(x1, y1), (x2, y2)]
    std = deviate(xleft, xright, yleft, yright, centers)
    # NOTE(review): tuples compare lexicographically, so the right-side
    # deviation only breaks ties on the left — confirm this is intended.
    if std < optdev:
        optdev = std
        opt = (0, split)
    if d:
        # Plot data and the two centroids.
        plt.scatter(initX, inity, marker="o")
        plt.scatter(np.array(centers[0][0]), np.array(centers[0][1]), marker="v")
        plt.scatter(np.array(centers[1][0]), np.array(centers[1][1]), marker="v")
        plt.show()
    # Call recursive function "regressor"
    return _opt(np.delete(X, 0), np.delete(y, 0), split + 1, opt=opt,
                optdev=optdev, step=step, initX=initX, inity=inity, d=d, end=end)


x = opt(X, y, split=4, d=False)

Decision Tree Regression Without Sklearn

Finally finished one split optimization.


# Imports
from matplotlib import pyplot as plt
from math import sqrt
import numpy as np

# Data
X = np.array([1, 1, 2, 1, 2, 5, 6, 5, 7, 5])
y = np.array([2, 3, 2, 3, 3, 6, 7, 6, 7, 7])

# NOTE: the original also defined a hand-rolled sum() that shadowed the
# builtin; it behaved identically, so the builtin is used instead.


def distance(x1, x2, y1, y2):
    """Euclidean distance between (x1, y1) and (x2, y2)."""
    return sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2)


def deviate(xleft, xright, yleft, yright, centroids):
    """Standard deviation of point-to-centroid distances for each side.

    centroids is [(left_cx, left_cy), (right_cx, right_cy)].
    Returns a (left_std, right_std) tuple.
    """
    lxc, lyc = centroids[0]
    rxc, ryc = centroids[1]
    # BUG FIX: the original looped over range(0, len(...) - 1), silently
    # excluding the last point of each side from the deviation.
    distance1 = [distance(lxc, px, lyc, py) for px, py in zip(xleft, yleft)]
    distance2 = [distance(rxc, px, ryc, py) for px, py in zip(xright, yright)]
    return (np.std(distance1), np.std(distance2))


def train(X, y, split=2, step=1):
    """Score and plot the first candidate split, then recurse over the rest.

    Returns the (opt, optdev) pair produced by _train.
    """
    split = split + 1
    # Splits
    xleft, xright = X[:split], X[split:]
    yleft, yright = y[:split], y[split:]
    # Calculate centroids of each side
    x1, x2 = sum(xleft) / len(xleft), sum(xright) / len(xright)
    y1, y2 = sum(yleft) / len(yleft), sum(yright) / len(yright)
    # plot split
    plt.plot(np.append(split - 1, split - 1), np.array([0, 10]))
    centers = [(x1, y1), (x2, y2)]
    std = deviate(xleft, xright, yleft, yright, centers)
    # Plot data
    plt.scatter(X, y, marker="o")
    # plot centers
    plt.scatter(np.array(centers[0][0]), np.array(centers[0][1]), marker="v")
    plt.scatter(np.array(centers[1][0]), np.array(centers[1][1]), marker="v")
    plt.show()
    # Call recursive function "regressor"
    return _train(np.delete(X, 0), np.delete(y, 0), split + 1, opt=(0, split),
                  optdev=std, step=step, initX=X, inity=y)


def _train(X, y, split, opt=None, optdev=None, step=None, initX=None, inity=None):
    """Recursively score and plot the remaining candidate splits.

    Returns (opt, optdev) when the recursion bottoms out.
    """
    if len(X) < split + step:
        print("DONE WITH RECURSIVE PROCESS")
        print(f"RESULTS: Optimal Split:{opt}, Optimized Standard Deviation: left:{optdev[0]} right:{optdev[1]}")
        return (opt, optdev)
    # Splits
    xleft, xright = initX[:split], initX[split:]
    yleft, yright = inity[:split], inity[split:]
    # Calculate centroids of each side
    x1, x2 = sum(xleft) / len(xleft), sum(xright) / len(xright)
    y1, y2 = sum(yleft) / len(yleft), sum(yright) / len(yright)
    # plot split
    plt.plot(np.append(split - 1, split - 1), np.array([0, 10]))
    centers = [(x1, y1), (x2, y2)]
    std = deviate(xleft, xright, yleft, yright, centers)
    # Plot data
    plt.scatter(initX, inity, marker="o")
    # plot centers
    plt.scatter(np.array(centers[0][0]), np.array(centers[0][1]), marker="v")
    plt.scatter(np.array(centers[1][0]), np.array(centers[1][1]), marker="v")
    plt.show()
    # Call recursive function "regressor"
    return _train(np.delete(X, 0), np.delete(y, 0), split + 1, opt=opt,
                  optdev=optdev, step=step, initX=initX, inity=inity)


x = train(X, y, split=4)

Decision Tree Regression Without Sklearn

The main purpose of this program is to do decision tree regression without using any machine learning libraries (Keras, Sklearn, etc.) I'm currently in the process of creating the regressor. I'm using a recursive function to act as the regressor for my task.


# Imports
from matplotlib import pyplot as plt
from math import sqrt
import numpy as np

# Data
X = np.array([1, 1, 2, 1, 2, 5, 6, 5, 7, 5])
y = np.array([2, 3, 2, 3, 3, 6, 7, 6, 7, 7])

# NOTE: the original also defined a hand-rolled sum() that shadowed the
# builtin; it behaved identically, so the builtin is used instead.


def distance(x1, x2, y1, y2):
    """Euclidean distance between (x1, y1) and (x2, y2)."""
    return sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2)


def deviate(xleft, xright, yleft, yright, centroids):
    """Standard deviation of point-to-centroid distances for both sides.

    centroids is [(left_cx, left_cy), (right_cx, right_cy)].
    Returns a (left_std, right_std) tuple.
    """
    lxc, lyc = centroids[0]
    rxc, ryc = centroids[1]
    # BUG FIX: the original looped over range(0, len(...) - 1), silently
    # excluding the last point of each side from the deviation.
    distance1 = [distance(lxc, px, lyc, py) for px, py in zip(xleft, yleft)]
    distance2 = [distance(rxc, px, ryc, py) for px, py in zip(xright, yright)]
    return (np.std(distance1), np.std(distance2))


# Helper function that calculates the initial split; in this early version
# it only plots the split and does not yet call the recursive function.
def train(X, y, split=2):
    # Splits
    xleft, xright = X[:split], X[split:]
    yleft, yright = y[:split], y[split:]
    # Calculate centroids of each side
    x1, x2 = sum(xleft) / len(xleft), sum(xright) / len(xright)
    y1, y2 = sum(yleft) / len(yleft), sum(yright) / len(yright)
    # plot split
    plt.plot(np.append(split - 1, split - 1), np.array([0, 10]))
    centers = [(x1, y1), (x2, y2)]
    std = deviate(xleft, xright, yleft, yright, centers)
    # Plot data
    plt.scatter(X, y, marker="o")
    # plot centers
    plt.scatter(np.array(centers[0][0]), np.array(centers[0][1]), marker="v")
    plt.scatter(np.array(centers[1][0]), np.array(centers[1][1]), marker="v")
    plt.show()


# Recursive Function (regressor) — not implemented yet in this version.
def _train(X, y, split, opt=None, optdev=None):
    pass


train(X, y, split=5)
Akhil Apr 11

nice work!

1