import csv
import json
import re

import requests

# Page URL template for the GameChanger classic web UI.
url = "https://gc.com/t/{season_id}/{team_id}/{page}"


def get_authenticated_session(request):
    """Log in to gc.com with the user's stored credentials and return the session."""
    gc_username = request.user.gamechanger_account.user
    gc_password = request.user.gamechanger_account.password
    s = requests.Session()
    s.headers.update({"referer": "https://gc.com/do-login"})
    # Prime the session so the csrftoken cookie is set before posting the login form.
    s.get("https://gc.com/login")
    r2 = s.post(
        "https://gc.com/do-login",
        data={
            "csrfmiddlewaretoken": s.cookies.get("csrftoken"),
            "email": gc_username,
            "password": gc_password,
        },
    )
    if r2.status_code == 200:
        return s
    raise requests.exceptions.RequestException(
        f"Returned {r2.status_code} for {r2.reason}"
    )


def submit_lineup(request, lineup):
    """Post a lineup to gc.com for the user's configured season and team."""
    authenticated_session = get_authenticated_session(request)
    season_id = request.user.gamechanger_preferences.season_id
    team_id = request.user.gamechanger_preferences.team_id
    authenticated_session.headers.update(
        {
            "referer": url.format(
                season_id=season_id, team_id=team_id, page="lineup_edit"
            ),
            "x-csrftoken": authenticated_session.cookies.get("csrftoken"),
            # Mirrors the content type the site's own lineup editor sends.
            "Content-Type": "application/x-www-form-urlencoded;",
        }
    )
    r = authenticated_session.post(
        "https://gc.com/do-save-lineup/{team_id}".format(
            # The save endpoint wants only the trailing segment of the team id.
            team_id=team_id.split("-").pop()
        ),
        json={"lineup": lineup},
    )
    if r.status_code == 200:
        return r
    raise requests.exceptions.RequestException(
        f"Returned {r.status_code} for {r.reason}"
    )


def scrape_page(season_id, team_id, page):
    """Pull the JSON blob that gc.com embeds in each page's page.initialize() call."""
    r = requests.get(url.format(season_id=season_id, team_id=team_id, page=page))
    initialize_page_json = re.search(
        r'page.initialize\(\$.parseJSON\("(.*?)"\)',
        r.content.decode("unicode_escape"),
    )
    return json.loads(initialize_page_json.group(1))


def stats(request):
    """Scrape the qualified batting stats CSV and key it by GameChanger player id."""
    authenticated_session = get_authenticated_session(request)
    season_id = request.user.gamechanger_preferences.season_id
    team_id = request.user.gamechanger_preferences.team_id
    page = "stats/batting/Qualified/standard/csv"
    r = authenticated_session.get(
        url.format(season_id=season_id, team_id=team_id, page=page)
    )

    # Map (first name, last name) -> player_id using the roster page.
    roster = scrape_page(season_id, team_id, "roster")
    id_lookup = {
        (p.get("fname"), p.get("lname")): p.get("player_id") for p in roster["roster"]
    }

    decoded_content = r.content.decode("utf-8")
    rows = list(csv.reader(decoded_content.splitlines(), delimiter=","))

    # Row 0 holds the section headers ("Offensive Stats", "Defensive Stats"),
    # row 1 the per-column stat names, and the player rows start at row 2.
    offensive_start = rows[0].index("Offensive Stats")
    defensive_start = rows[0].index("Defensive Stats")
    player_keys = list(enumerate(rows[1][:offensive_start]))  # number/name columns (unused below)
    offensive_keys = list(
        enumerate(rows[1][offensive_start : defensive_start - 1], start=offensive_start)
    )
    defensive_keys = list(
        enumerate(rows[1][defensive_start:], start=defensive_start)
    )

    player_stats = {}
    for row in rows[2:]:
        number, lname, fname = row[:3]
        if number == "Team":
            # The team-totals row marks the end of the per-player rows.
            break
        gamechanger_id = id_lookup[(fname, lname)]
        player_stats[gamechanger_id] = {
            "offensive": {k: row[i] for i, k in offensive_keys},
            "defensive": {k: row[i] for i, k in defensive_keys},
        }
    return player_stats
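

# Hypothetical usage sketch (not part of the original module): exposing stats()
# through a Django view. The view name and JsonResponse wiring are assumptions;
# the functions above only require that request.user carries the
# gamechanger_account and gamechanger_preferences relations they reference.
def team_stats_view(request):
    from django.http import JsonResponse  # imported here so Django stays optional

    try:
        data = stats(request)
    except requests.exceptions.RequestException as exc:
        # Surface gc.com failures as a bad-gateway response rather than a 500.
        return JsonResponse({"error": str(exc)}, status=502)
    return JsonResponse(data)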