# benchcoach-django/gamechanger/utils/gamechanger.py
import csv
import datetime
import json
import re

import pytz
import requests
from bs4 import BeautifulSoup

# URL template for GameChanger team pages on gc.com.
url = "https://gc.com/t/{season_id}/{team_id}/{page}"


def get_authenticated_session(request):
    """Log in to gc.com with the user's stored credentials and return the session."""
    gc_username = request.user.gamechanger_account.user
    gc_password = request.user.gamechanger_account.password
    s = requests.Session()
    s.headers.update({"referer": "https://gc.com/do-login"})
    # Fetch the login page first so the session picks up a csrftoken cookie.
    s.get("https://gc.com/login")
    r2 = s.post(
        "https://gc.com/do-login",
        cookies=s.cookies,
        data={
            "csrfmiddlewaretoken": s.cookies.get("csrftoken"),
            "email": gc_username,
            "password": gc_password,
        },
    )
    if r2.status_code == 200:
        return s
    raise requests.exceptions.RequestException(
        f"Returned {r2.status_code} for {r2.reason}"
    )
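
# Sketch of intended use from a Django view (assumes request.user carries the
# gamechanger_account relation used above):
#
#     def teams_view(request):
#         session = get_authenticated_session(request)
#         teams = get_teams(session)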


def submit_lineup(request, lineup):
    """POST a lineup to gc.com for the user's configured season/team."""
    authenticated_session = get_authenticated_session(request)
    season_id = request.user.gamechanger_preferences.season_id
    team_id = request.user.gamechanger_preferences.team_id
    authenticated_session.headers.update(
        {
            "referer": url.format(
                season_id=season_id, team_id=team_id, page="lineup_edit"
            ),
            "x-csrftoken": authenticated_session.cookies.get("csrftoken"),
            # Set explicitly so requests' json= default (application/json)
            # does not override the content type the endpoint expects.
            "Content-Type": "application/x-www-form-urlencoded;",
        }
    )
    r = authenticated_session.post(
        cookies=authenticated_session.cookies,
        # The endpoint takes only the trailing dash-separated segment of the
        # team id in its path.
        url="https://gc.com/do-save-lineup/{team_id}".format(
            team_id=team_id.split("-").pop()
        ),
        json={"lineup": lineup},
    )
    if r.status_code == 200:
        return r
    raise requests.exceptions.RequestException(
        f"Returned {r.status_code} for {r.reason}"
    )
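
# The exact lineup schema gc.com expects isn't captured here; a hypothetical
# call might look like this (field names are assumptions, not the confirmed
# API):
#
#     submit_lineup(request, [{"player_id": "abc123", "position": "P",
#                              "order": 1}])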


def scrape_page(season_id, team_id, page):
    """Fetch a team page and return the JSON blob it passes to page.initialize()."""
    r = requests.get(url.format(season_id=season_id, team_id=team_id, page=page))
    # The page embeds its data as page.initialize($.parseJSON("...")) inside a
    # script tag; pull the escaped JSON string out and parse it.
    initialize_page_json = re.search(
        r'page.initialize\(\$.parseJSON\("(.*?)"\)', r.content.decode("unicode_escape")
    )
    m = initialize_page_json.group(1)
    return json.loads(m)
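
# Example: scrape_page(season_id, team_id, "roster") yields a dict whose
# "roster" key holds player dicts with "fname", "lname", and "player_id"
# (see the id_lookup built in stats() below).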


def get_teams(session):
    """Scrape the account's teams list from gc.com into a list of dicts."""
    url = "https://gc.com/account/teams"
    session.headers.update(
        {
            "referer": "https://gc.com/account/profile",
            "x-csrftoken": session.cookies.get("csrftoken"),
        }
    )
    page = session.get(cookies=session.cookies, url=url)
    soup = BeautifulSoup(page.content, "html.parser")
    team_elements = [i for i in soup.find_all("li") if i.attrs.get("data-team-id")]
    teams = []
    for team_element in team_elements:
        league_type, number_of_games = [
            c.text.strip() for c in team_element.findChildren("li")
        ][1:3]
        # The link href looks like /t/<season_slug>/<team_slug>.
        season_slug, team_slug = (
            team_element.find("a").attrs.get("href", "///").split("/")[2:]
        )
        teams.append(
            {
                "name": team_element.find("a").text,
                "id": team_element.attrs.get("data-team-id"),
                "season": team_element.findPrevious("header").text,
                "league_type": league_type,
                "number_of_games": number_of_games,
                "season_slug": season_slug,
                "team_slug": team_slug,
            }
        )
    return teams
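
# Example element of get_teams()'s return value (values are illustrative):
#
#     {"name": "Benchcoach Bears", "id": "5e9...", "season": "Spring 2020",
#      "league_type": "Rec", "number_of_games": "12", "season_slug":
#      "spring-2020", "team_slug": "benchcoach-bears"}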


def get_events(request):
    """Scrape the team's schedule page and return its games."""
    authenticated_session = get_authenticated_session(request)
    season_id = request.user.gamechanger_preferences.season_id
    team_id = request.user.gamechanger_preferences.team_id
    # This GET appears to exist only to refresh the session's cookies; its
    # response is discarded.
    page = "stats/batting/Qualified/standard/csv"
    authenticated_session.get(
        url.format(season_id=season_id, team_id=team_id, page=page)
    )
    authenticated_session.headers.update(
        {
            "x-csrftoken": authenticated_session.cookies.get("csrftoken"),
        }
    )
    page = authenticated_session.get(
        cookies=authenticated_session.cookies,
        url=url.format(season_id=season_id, team_id=team_id, page="schedule/games"),
    )
    soup = BeautifulSoup(page.content, "html.parser")
    # Default to an empty list so rows without a class attribute don't raise.
    game_elements = [
        r for r in soup.find_all("tr") if "game" in r.attrs.get("class", [])
    ]
    games = []
    for game_element in game_elements:
        game_slug = game_element.find("a").attrs.get("href").split("/")[1]
        title = game_element.find("a").text
        # Each row carries three <time> elements: local date, start, arrival.
        jslocaldate, jslocaltime_start, jslocaltime_arrival = (
            t.attrs.get("datetime") for t in game_element.findAll("time")
        )
        games.append(
            {
                "id": game_element.attrs.get("data-id"),
                "title": title,
                "game_slug": game_slug,
                # The datetime attribute is naive ISO-8601; treat it as UTC.
                "start": pytz.utc.localize(
                    datetime.datetime.fromisoformat(jslocaltime_start)
                ),
            }
        )
    return games
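
# Example element of get_events()'s return value (values are illustrative):
#
#     {"id": "abc123", "title": "vs. Rivals", "game_slug": "g-abc123",
#      "start": datetime.datetime(2020, 5, 1, 18, 0, tzinfo=<UTC>)}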


def stream():
    """Stub: fetching a game's live event stream is sketched below but not wired up."""
    # game_page = authenticated_session.get(
    #     cookies=authenticated_session.cookies,
    #     url=f"https://gc.com/{game_slug}",
    # )
    # game_soup = BeautifulSoup(game_page.content, "html.parser")
    # data_push_url_rel = game_soup.find("body").attrs.get("data-push-url")[2:]
    # data_push_url = f"https://{data_push_url_rel}?sabertooth_aware=true"
    # stream_page = authenticated_session.get(
    #     cookies=authenticated_session.cookies, url=data_push_url
    # )
    # game_stream = json.loads(stream_page.content)
    pass


def stats(request):
    """Download the team's batting stats CSV and key it by GameChanger player id."""
    authenticated_session = get_authenticated_session(request)
    season_id = request.user.gamechanger_preferences.season_id
    team_id = request.user.gamechanger_preferences.team_id
    page = "stats/batting/Qualified/standard/csv"
    r = authenticated_session.get(
        url.format(season_id=season_id, team_id=team_id, page=page)
    )
    # Map (first name, last name) -> player_id using the scraped roster.
    roster = scrape_page(season_id, team_id, "roster")
    id_lookup = {
        (p.get("fname"), p.get("lname")): p.get("player_id") for p in roster["roster"]
    }
    decoded_content = r.content.decode("utf-8")
    cr = csv.reader(decoded_content.splitlines(), delimiter=",")
    my_list = list(cr)
    # Row 0 holds section labels ("Offensive Stats", "Defensive Stats"); row 1
    # holds the per-column stat keys. Pair each key with its column index.
    off_start = my_list[0].index("Offensive Stats")
    def_start = my_list[0].index("Defensive Stats")
    # Leading columns identify the player; currently unused below.
    player_keys = list(enumerate(my_list[1][:off_start]))
    # Note: the column immediately before "Defensive Stats" is excluded.
    offensive_keys = list(
        enumerate(my_list[1][off_start : def_start - 1], start=off_start)
    )
    defensive_keys = list(enumerate(my_list[1][def_start:], start=def_start))
    stats = {}
    for row in my_list[2:]:
        number, lname, fname = row[:3]
        # The trailing totals row is labeled "Team" in the number column.
        if number == "Team":
            break
        gamechanger_id = id_lookup[(fname, lname)]
        stats[gamechanger_id] = {
            "offensive": {k: row[i] for i, k in offensive_keys},
            "defensive": {k: row[i] for i, k in defensive_keys},
        }
    return stats
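
# Example shape of stats()'s return value (stat keys are illustrative; the
# actual keys come from the CSV header row):
#
#     {"<player_id>": {"offensive": {"AVG": ".333", "H": "12", ...},
#                      "defensive": {"FPCT": ".950", ...}}}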