player.py
# Import libraries (requests and BeautifulSoup are only used by the
# commented-out scraping code below)
import sys
import requests
from bs4 import BeautifulSoup
import numpy as np
import pandas as pd

columns = ["Player"]  # currently unused


def print_full(x):
    # Temporarily lift pandas' display limit so every row prints, then restore it
    pd.set_option('display.max_rows', len(x))
    print(x)
    pd.reset_option('display.max_rows')
def scrape():
    # Alternative source: read the stats tables straight from FantasyPros
    # df = pd.read_html('https://www.fantasypros.com/nfl/stats/qb.php')
    # print_full(df)
    df = pd.read_csv("KILLME.csv")
    # df.drop([0])
    # df.to_csv("KILLME.csv")
    coolShit = df[["Rank", "Player"]]
    print_full(coolShit)
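

def scrape_fantasypros():
    # Assumed helper, not in the original file: a minimal sketch of the
    # read_html path commented out in scrape() above. pd.read_html returns a
    # list of DataFrames (one per <table> on the page), so a table has to be
    # selected before printing; this grabs the first one and assumes an HTML
    # parser (lxml or html5lib) is installed.
    tables = pd.read_html('https://www.fantasypros.com/nfl/stats/qb.php')
    print_full(tables[0])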
## Create a URL object
#url = 'https://fantasydata.com/nfl/fantasy-football-leaders'
## Fetch the page and parse it
#page = requests.get(url)
#soup = BeautifulSoup(page.text, 'lxml')
#coolShit = soup.find_all('tbody')
#print(coolShit)
#txt.write(str(coolShit))
#
## Build a DataFrame from the scraped table
#table1 = soup.find('table')  # assumed: table1 was used below but never defined
#headers = []
#for i in table1.find_all("th"):
#    title = i.text
#    headers.append(title)
#mydata = pd.DataFrame(columns=headers)
#for j in table1.find_all("tr")[1:]:
#    row_data = j.find_all("td")
#    row = [i.text for i in row_data]
#    length = len(mydata)
#    mydata.loc[length] = row
#mydata.drop(mydata.index[0:7], inplace=True)
#mydata.drop(mydata.index[222:229], inplace=True)
#mydata.reset_index(inplace=True, drop=True)
#mydata.to_csv("covid_data.csv", index=False)
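

# Assumed entry point, not in the original file: call scrape() when the
# script is executed directly (python player.py).
if __name__ == "__main__":
    scrape()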