import curses
import io
import json
from curses import wrapper

import pandas
import requests
from bs4 import BeautifulSoup

url = "https://docs.google.com/document/d/e/2PACX-1vQiVT_Jj04V35C-YRzvoqyEYYzdXHcRyMUZCVQRYCu6gQJX7hbNhJ5eFCMuoX47cAsDW2ZBYppUQITr/pub"


def process_document(url):
	"""Fetch the published Google Doc at *url*, parse its data table,
	and hand the result to the curses UI.

	The document is expected to contain a ``<table class="c6">``; its
	rows (minus the header row) are parsed by :func:`pandas.read_html`
	and forwarded to :func:`curses_main`.

	Raises:
		requests.HTTPError: if the download fails.
		ValueError: if no matching table is found in the document.
	"""
	response = requests.get(url)
	# Fail loudly on HTTP errors instead of silently parsing an error page.
	response.raise_for_status()
	soup = BeautifulSoup(response.text, "html.parser")

	table = soup.find("table", class_="c6")
	if table is None:
		raise ValueError('no <table class="c6"> found in document')

	# read_html expects markup (a string or file-like object), not a bs4
	# Tag; wrapping in StringIO also avoids the pandas deprecation warning
	# for passing literal HTML as a plain string.
	data = pandas.read_html(io.StringIO(str(table)), skiprows=1)

	curses.wrapper(curses_main, data)




def curses_main(stdscr, data):
	"""Render *data* on the curses screen and wait for one keypress.

	Parameters:
		stdscr: the curses standard window supplied by ``curses.wrapper``.
		data: the parsed table data (a list of DataFrames from
			``process_document``); coerced to ``str`` for display because
			``addstr`` only accepts str/bytes — passing the list directly
			would raise TypeError, which ``except curses.error`` would
			NOT catch.
	"""
	stdscr.clear()
	try:
		stdscr.addstr(10, 10, str(data))
		stdscr.refresh()
		stdscr.getch()
	except curses.error as e:
		# Typically raised when the text does not fit the window.
		stdscr.addstr(0, 0, f"An error occurred: {e}")
		stdscr.refresh()

if __name__ == "__main__":
	# Only run when executed as a script, so importing this module does
	# not trigger a network request and a curses session.
	process_document(url)