Search is not available for this dataset
text stringlengths 75 104k |
|---|
def choice_input(options=None, prompt='Press ENTER to continue.',
                 showopts=True, qopt=False):
    """Get input from a list of choices (q to quit).

    :options: list of acceptable answers; with no options, any (or no)
        input returns 0.
    :prompt: text shown to the user before reading input.
    :showopts: if True, append the option list to the prompt.
    :qopt: if True, allow 'q' to quit (with confirmation).
    :returns: the chosen option, or 0 when no options were given.
    """
    # NOTE: original used a mutable default `options=[]`, which is shared
    # across calls; None sentinel preserves the empty-list semantics safely.
    if options is None:
        options = []
    if showopts:
        prompt = prompt + ' ' + str(options)
    if qopt:
        prompt = prompt + ' (q to quit)'
    while True:
        try:
            choice = string_input(prompt + ' ')
        except SyntaxError:
            # bare ENTER under the py2-style input wrapper
            choice = None
        if not choice:
            if not options:
                return 0
            continue
        if choice in options:
            return choice
        if qopt and choice == 'q':
            is_sure = string_input('Are you sure you want to quit? ')
            if is_sure in ('Y', 'y', 'yes'):
                exit('\nThanks for playing. Goodbye.\n')
            continue
        if not options:
            # no constraint on answers, treat any input as success
            return 0
        print('Answer must be one of ' + str(options) +
              '. Your answer?')
def long_input(prompt='Multi-line input\n' +
               'Enter EOF on a blank line to end ' +
               '(ctrl-D in *nix, ctrl-Z in windows)',
               maxlines=None, maxlength=None):
    """Get a multi-line string as input.

    :prompt: text printed once before reading lines.
    :maxlines: maximum number of lines to read (None = unlimited).
    :maxlength: maximum length of each line (None = unlimited).
    :returns: the lines joined with newlines.
    """
    lines = []
    print(prompt)
    try:
        # NOTE: original duplicated the read logic in both branches and
        # used `finally: return`, which silently swallowed *every*
        # exception (including KeyboardInterrupt); only EOFError should
        # terminate input.
        while maxlines is None or len(lines) < maxlines:
            line = string_input('')
            if maxlength:
                line = line[:maxlength]
            lines.append(line)
    except EOFError:
        pass
    return '\n'.join(lines)
def list_input(prompt='List input - enter each item on a seperate line\n' +
               'Enter EOF on a blank line to end ' +
               '(ctrl-D in *nix, ctrl-Z in windows)',
               maxitems=None, maxlength=None):
    """Get a list of strings as input.

    :prompt: text printed once before reading items.
    :maxitems: maximum number of items to read (None = unlimited).
    :maxlength: maximum length of each item (None = unlimited).
    :returns: list of the entered strings.
    """
    items = []
    print(prompt)
    try:
        # NOTE: original duplicated the read logic in both branches and
        # used `finally: return`, which silently swallowed *every*
        # exception (including KeyboardInterrupt); only EOFError should
        # terminate input.
        while maxitems is None or len(items) < maxitems:
            item = string_input('')
            if maxlength:
                item = item[:maxlength]
            items.append(item)
    except EOFError:
        pass
    return items
def outfile_input(extension=None):
    """Get an output file name as input, confirming overwrite/creation.

    :extension: optional extension to enforce on the name (with or
        without a leading dot).
    :returns: a writable file name confirmed by the user.
    """
    def _touch(filename, mode):
        """Try to create/touch `filename`; report errors. Returns bool."""
        try:
            nowtime = time.time()
            with open(filename, mode):
                os.utime(filename, (nowtime, nowtime))
            return True
        # NOTE: FileNotFoundError must be caught BEFORE IOError --
        # on Python 3, IOError is OSError, the parent of both
        # FileNotFoundError and PermissionError, so the original ordering
        # made those two handlers unreachable.
        except FileNotFoundError:
            print(filename + ': directory not found. Try again.')
        except (PermissionError, IOError):
            print('Write permission denied on ' + filename +
                  '. Try again.')
        return False

    while True:
        filename = string_input('File name? ')
        if extension and not filename.endswith(extension):
            sep = '' if extension.startswith('.') else '.'
            filename = filename + sep + extension
        if os.path.isfile(filename):
            choice = choice_input(prompt=filename +
                                  ' already exists. Overwrite?',
                                  options=['y', 'n'])
            # append mode: keep existing contents, just update mtime
            if choice == 'y' and _touch(filename, 'a'):
                return filename
        else:
            choice = choice_input(
                prompt=filename + ' does not exist. Create it?',
                options=['y', 'n'])
            if choice == 'y' and _touch(filename, 'w'):
                return filename
def roster(self, year):
    """Returns the roster table for the given year.

    :year: The year for which we want the roster; defaults to current year.
    :returns: A DataFrame containing roster information for that year.
    """
    roster_table = self.get_year_doc(year)('table#roster')
    df = sportsref.utils.parse_table(roster_table)
    # 'R' marks rookies (0 years); blanks become NaN so the column is float
    experience = df['years_experience'].replace('R', 0).replace('', np.nan)
    df['years_experience'] = experience.astype(float)
    return df
def schedule(self, year):
    """Gets schedule information for a team-season.

    :year: The year for which we want the schedule.
    :returns: DataFrame of schedule information.
    """
    games_doc = self.get_year_doc('{}_games'.format(year))
    games_table = games_doc('table#games')
    return sportsref.utils.parse_table(games_table)
def date(self):
    """Returns the date of the game. See Python datetime.date documentation
    for more.

    :returns: A datetime.date object with year, month, and day attributes.
    """
    # boxscore IDs begin with the date as YYYYMMDD
    parts = re.match(r'(\d{4})(\d{2})(\d{2})', self.boxscore_id).groups()
    y, m, d = (int(part) for part in parts)
    return datetime.date(year=y, month=m, day=d)
def weekday(self):
    """Returns the day of the week on which the game occurred.

    :returns: String representation of the day of the week for the game.
    """
    names = ('Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday',
             'Saturday', 'Sunday')
    # datetime.date.weekday(): 0 = Monday ... 6 = Sunday
    return names[self.date().weekday()]
def home(self):
    """Returns home team ID.

    :returns: 3-character string representing home team's ID.
    """
    linescore = self.get_doc()('table.linescore')
    # the home team's row is the third <tr>; its team link is the
    # third anchor in that row
    rel_url = linescore('tr').eq(2)('a').eq(2).attr['href']
    return sportsref.utils.rel_url_to_id(rel_url)
def home_score(self):
    """Returns score of the home team.

    :returns: int of the home score.
    """
    linescore = self.get_doc()('table.linescore')
    # final score is the last cell of the home (third) row
    score_text = linescore('tr').eq(2)('td')[-1].text_content()
    return int(score_text)
def away_score(self):
    """Returns score of the away team.

    :returns: int of the away score.
    """
    linescore = self.get_doc()('table.linescore')
    # final score is the last cell of the away (second) row
    score_text = linescore('tr').eq(1)('td')[-1].text_content()
    return int(score_text)
def winner(self):
    """Returns the team ID of the winning team. Returns NaN if a tie."""
    home_pts = self.home_score()
    away_pts = self.away_score()
    if home_pts == away_pts:
        return None  # tie
    return self.home() if home_pts > away_pts else self.away()
def week(self):
    """Returns the week in which this game took place. 18 is WC round, 19
    is Div round, 20 is CC round, 21 is SB.

    :returns: Integer from 1 to 21.
    """
    raw = self.get_doc()('div#div_other_scores h2 a').attr['href']
    pattern = r'/years/{}/week_(\d+)\.htm'.format(self.season())
    match = re.match(pattern, raw)
    # no week link in the URL -> return 21 (the Super Bowl slot)
    return int(match.group(1)) if match else 21
def season(self):
    """
    Returns the year ID of the season in which this game took place.
    Useful for week 17 January games.

    :returns: An int representing the year of the season.
    """
    played = self.date()
    # games through March belong to the previous year's season
    if played.month <= 3:
        return played.year - 1
    return played.year
def starters(self):
    """Returns a DataFrame where each row is an entry in the starters table
    from PFR.

    The columns are:
    * player_id - the PFR player ID for the player (note that this column
      is not necessarily all unique; that is, one player can be a starter
      in multiple positions, in theory).
    * playerName - the listed name of the player; this too is not
      necessarily unique.
    * position - the position at which the player started for their team.
    * team - the team for which the player started.
    * home - True if the player's team was at home, False if they were away
    * offense - True if the player is starting on an offensive position,
      False if defense.

    :returns: A pandas DataFrame. See the description for details.
    """
    doc = self.get_doc()
    away_table = doc('table#vis_starters')
    home_table = doc('table#home_starters')
    data = []
    # away table first (index 0), home table second (index 1)
    for is_home, table in enumerate((away_table, home_table)):
        team = self.home() if is_home else self.away()
        for row_num, row in enumerate(table('tbody tr').items()):
            data.append({
                'player_id': sportsref.utils.rel_url_to_id(
                    row('a')[0].attrib['href']
                ),
                'playerName': row('th').text(),
                'position': row('td').text(),
                'team': team,
                'home': is_home == 1,
                # rows 0-10 of each table are treated as offense
                'offense': row_num <= 10,
            })
    return pd.DataFrame(data)
def surface(self):
    """The playing surface on which the game was played.

    :returns: string representing the type of surface. Returns np.nan if
    not available.
    """
    info_table = self.get_doc()('table#game_info')
    parsed = sportsref.utils.parse_info_table(info_table)
    return parsed.get('surface', np.nan)
def over_under(self):
    """
    Returns the over/under for the game as a float, or np.nan if not
    available.
    """
    info_table = self.get_doc()('table#game_info')
    info = sportsref.utils.parse_info_table(info_table)
    if 'over_under' not in info:
        return None
    # keep only the leading numeric token of the over/under entry
    return float(info['over_under'].split()[0])
def coin_toss(self):
    """Gets information relating to the opening coin toss.

    Keys are:
    * wonToss - contains the ID of the team that won the toss
    * deferred - bool whether the team that won the toss deferred it

    :returns: Dictionary of coin toss-related info.
    """
    doc = self.get_doc()
    table = doc('table#game_info')
    giTable = sportsref.utils.parse_info_table(table)
    if 'Won Toss' in giTable:
        # TODO: finish coinToss function
        # NOTE(review): unimplemented -- this branch falls through and the
        # function returns None even when coin-toss info IS present, so the
        # documented dict is never actually produced.
        pass
    else:
        return None
def weather(self):
    """Returns a dictionary of weather-related info.

    Keys of the returned dict:
    * temp
    * windChill
    * relHumidity
    * windMPH

    :returns: Dict of weather data.
    """
    doc = self.get_doc()
    table = doc('table#game_info')
    giTable = sportsref.utils.parse_info_table(table)
    if 'weather' in giTable:
        # every component of the weather string is optional, hence the
        # non-capturing optional groups
        regex = (
            r'(?:(?P<temp>\-?\d+) degrees )?'
            r'(?:relative humidity (?P<relHumidity>\d+)%, )?'
            r'(?:wind (?P<windMPH>\d+) mph, )?'
            r'(?:wind chill (?P<windChill>\-?\d+))?'
        )
        m = re.match(regex, giTable['weather'])
        d = m.groupdict()
        # cast values to int; unmatched groups are None and int(None)
        # raises TypeError, which we deliberately skip
        for k in d:
            try:
                d[k] = int(d[k])
            except TypeError:
                pass
        # one-off fixes: a missing wind chill defaults to the temperature,
        # a missing wind speed defaults to 0
        d['windChill'] = (d['windChill'] if pd.notnull(d['windChill'])
                          else d['temp'])
        d['windMPH'] = d['windMPH'] if pd.notnull(d['windMPH']) else 0
        return d
    else:
        # no weather found, because it's a dome
        # TODO: what's relative humidity in a dome?
        return {
            'temp': 70, 'windChill': 70, 'relHumidity': None, 'windMPH': 0
        }
def pbp(self):
    """Returns a dataframe of the play-by-play data from the game.

    Order of function calls:
        1. parse_table on the play-by-play table
        2. expand_details
           - calls parse_play_details & _clean_features
        3. _add_team_columns
        4. various fixes to clean data
        5. _add_team_features

    :returns: pandas DataFrame of play-by-play. Similar to GPF.
    """
    doc = self.get_doc()
    table = doc('table#pbp')
    df = sportsref.utils.parse_table(table)
    # make the following features conveniently available on each row
    df['boxscore_id'] = self.boxscore_id
    df['home'] = self.home()
    df['away'] = self.away()
    df['season'] = self.season()
    df['week'] = self.week()
    feats = sportsref.nfl.pbp.expand_details(df)
    # add team and opp columns by iterating through rows
    df = sportsref.nfl.pbp._add_team_columns(feats)
    # add WPA column (requires diff, can't be done row-wise)
    df['home_wpa'] = df.home_wp.diff()
    # lag score columns, fill in 0-0 to start
    for col in ('home_wp', 'pbp_score_hm', 'pbp_score_aw'):
        if col in df.columns:
            df[col] = df[col].shift(1)
    df.loc[0, ['pbp_score_hm', 'pbp_score_aw']] = 0
    # fill in WP NaN's
    df.home_wp.fillna(method='ffill', inplace=True)
    # fix first play border after diffing/shifting for WP and WPA:
    # seed WP from the Vegas line and recompute the first WPA
    firstPlaysOfGame = df[df.secsElapsed == 0].index
    line = self.line()
    for i in firstPlaysOfGame:
        initwp = sportsref.nfl.winProb.initialWinProb(line)
        df.loc[i, 'home_wp'] = initwp
        df.loc[i, 'home_wpa'] = df.loc[i + 1, 'home_wp'] - initwp
    # fix last play border after diffing/shifting for WP and WPA
    lastPlayIdx = df.index[-1]
    lastPlayWP = df.loc[lastPlayIdx, 'home_wp']
    # if a tie, final WP is 50%; otherwise, determined by winner
    winner = self.winner()
    finalWP = 50. if pd.isnull(winner) else (winner == self.home()) * 100.
    df.loc[lastPlayIdx, 'home_wpa'] = finalWP - lastPlayWP
    # fix WPA for timeouts and plays after timeouts: a timeout itself has
    # zero WPA; the play after it gets the WP delta to the next play
    timeouts = df[df.isTimeout].index
    for to in timeouts:
        df.loc[to, 'home_wpa'] = 0.
        if to + 2 in df.index:
            wpa = df.loc[to + 2, 'home_wp'] - df.loc[to + 1, 'home_wp']
        else:
            wpa = finalWP - df.loc[to + 1, 'home_wp']
        df.loc[to + 1, 'home_wpa'] = wpa
    # add team-related features to DataFrame
    df = sportsref.nfl.pbp._add_team_features(df)
    # fill distToGoal NaN's (kickoff rows get 65)
    df['distToGoal'] = np.where(df.isKickoff, 65, df.distToGoal)
    df.distToGoal.fillna(method='bfill', inplace=True)
    df.distToGoal.fillna(method='ffill', inplace=True)  # for last play
    return df
def ref_info(self):
    """Gets a dictionary of ref positions and the ref IDs of the refs for
    that game.

    :returns: A dictionary of ref positions and IDs.
    """
    officials_table = self.get_doc()('table#officials')
    return sportsref.utils.parse_info_table(officials_table)
def player_stats(self):
    """Gets the stats for offense, defense, returning, and kicking of
    individual players in the game.

    :returns: A DataFrame containing individual player stats.
    """
    doc = self.get_doc()
    table_ids = ('player_offense', 'player_defense', 'returns', 'kicking')
    parsed = [
        sportsref.utils.parse_table(doc('table#{}'.format(table_id)))
        for table_id in table_ids
    ]
    parsed = [frame for frame in parsed if not frame.empty]
    # outer-merge all tables on whatever columns they share
    merged = reduce(
        lambda left, right: pd.merge(
            left, right, how='outer',
            on=list(set(left.columns) & set(right.columns))
        ),
        parsed
    )
    return merged.reset_index(drop=True)
def snap_counts(self):
    """Gets the snap counts for both teams' players and returns them in a
    DataFrame. Note: only goes back to 2012.

    :returns: DataFrame of snap count data
    """
    # TODO: combine duplicate players, see 201312150mia - ThomDa03
    doc = self.get_doc()
    teams = (self.away(), self.home())
    frames = []
    # away table first (i == 0), home table second (i == 1)
    for i, table_id in enumerate(('vis_snap_counts', 'home_snap_counts')):
        table = doc('table#{}'.format(table_id))
        parsed = sportsref.utils.parse_table(table)
        frames.append(parsed.assign(is_home=bool(i), team=teams[i],
                                    opp=teams[1 - i]))
    df = pd.concat(frames)
    return df if df.empty else df.set_index('player_id')
def get_main_doc(self):
    """Returns PyQuery object for the main season URL.

    :returns: PyQuery object.
    """
    season_url = '{}/leagues/NBA_{}.html'.format(sportsref.nba.BASE_URL,
                                                 self.yr)
    return pq(sportsref.utils.get_html(season_url))
def get_sub_doc(self, subpage):
    """Returns PyQuery object for a given subpage URL.

    :subpage: The subpage of the season, e.g. 'per_game'.
    :returns: PyQuery object.
    """
    return pq(sportsref.utils.get_html(self._subpage_url(subpage)))
def get_team_ids(self):
    """Returns a list of the team IDs for the given year.

    :returns: List of team IDs.
    """
    stats = self.team_stats_per_game()
    if stats.empty:
        print('ERROR: no teams found')
        return []
    return stats.index.tolist()
def team_ids_to_names(self):
    """Mapping from 3-letter team IDs to full team names.

    :returns: Dictionary with team IDs as keys and full team strings as
    values.
    """
    table = self.get_main_doc()('table#team-stats-per_game')
    # flattened parse yields IDs; unflattened parse yields display names
    ids = sportsref.utils.parse_table(table, flatten=True)['team_id']
    names = sportsref.utils.parse_table(table, flatten=False)['team_name']
    if len(names) != len(ids):
        raise Exception("team names and team IDs don't align")
    return dict(zip(ids, names))
def team_names_to_ids(self):
    """Mapping from full team names to 3-letter team IDs.

    :returns: Dictionary with team names as keys and team IDs as values.
    """
    # simply invert the ID -> name mapping
    return {name: team_id
            for team_id, name in self.team_ids_to_names().items()}
def schedule(self, kind='R'):
    """Returns a list of BoxScore IDs for every game in the season.
    Only needs to handle 'R' or 'P' options because decorator handles 'B'.

    :param kind: 'R' for regular season, 'P' for playoffs, 'B' for both.
        Defaults to 'R'.
    :returns: DataFrame of schedule information.
    :rtype: pd.DataFrame
    """
    kind = kind.upper()[0]
    dfs = []
    # get games from each month
    for month in ('october', 'november', 'december', 'january', 'february',
                  'march', 'april', 'may', 'june'):
        try:
            doc = self.get_sub_doc('games-{}'.format(month))
        except ValueError:
            # no games page for this month; skip it
            continue
        table = doc('table#schedule')
        df = sportsref.utils.parse_table(table)
        dfs.append(df)
    df = pd.concat(dfs).reset_index(drop=True)
    # figure out how many regular season games
    try:
        # if the playoffs page exists, the season has already finished
        sportsref.utils.get_html('{}/playoffs/NBA_{}.html'.format(
            sportsref.nba.BASE_URL, self.yr)
        )
        is_past_season = True
    except ValueError:
        is_past_season = False
    if is_past_season:
        # each game appears once per team in team totals, so halve the sum
        team_per_game = self.team_stats_per_game()
        n_reg_games = int(team_per_game.g.sum() // 2)
    else:
        n_reg_games = len(df)
    # subset appropriately based on `kind`: schedule rows are ordered with
    # all regular season games before playoff games
    if kind == 'P':
        return df.iloc[n_reg_games:]
    else:
        return df.iloc[:n_reg_games]
def standings(self):
    """Returns a DataFrame containing standings information.

    Combines the East and West division standings tables with seed and
    conference columns, then merges in the expanded standings table.
    """
    doc = self.get_sub_doc('standings')

    def _conf_standings(conf):
        """Parse one conference's standings and add seed/conference."""
        # parse_table already returns a DataFrame (the original wrapped
        # the East result in pd.DataFrame redundantly, but not the West)
        table = doc('table#divs_standings_{}'.format(conf))
        df = sportsref.utils.parse_table(table)
        df.sort_values('wins', ascending=False, inplace=True)
        df['seed'] = range(1, len(df) + 1)
        df['conference'] = conf
        return df

    full_df = pd.concat([_conf_standings('E'), _conf_standings('W')],
                        axis=0).reset_index(drop=True)
    # strip the '(seed)' suffix off team IDs, e.g. 'CLE (1)' -> 'CLE'
    full_df['team_id'] = full_df.team_id.str.extract(r'(\w+)\W*\(\d+\)',
                                                     expand=False)
    # non-numeric 'games back' entries (e.g. the leader's dash) become 0
    full_df['gb'] = [gb if isinstance(gb, (int, float)) else 0
                     for gb in full_df['gb']]
    full_df = full_df.drop('has_class_full_table', axis=1)
    expanded_df = sportsref.utils.parse_table(doc('table#expanded_standings'))
    return pd.merge(full_df, expanded_df, on='team_id')
def _get_team_stats_table(self, selector):
    """Helper function for stats tables on season pages. Returns a
    DataFrame indexed by team_id."""
    stats = sportsref.utils.parse_table(self.get_main_doc()(selector))
    stats.set_index('team_id', inplace=True)
    return stats
def _get_player_stats_table(self, identifier):
    """Helper function for player season stats.

    :identifier: string identifying the type of stat, e.g. 'per_game'.
    :returns: A DataFrame of stats.
    """
    doc = self.get_sub_doc(identifier)
    stats_table = doc('table#{}_stats'.format(identifier))
    return sportsref.utils.parse_table(stats_table)
def roy_voting(self):
    """Returns a DataFrame containing information about ROY voting."""
    url = '{}/awards/awards_{}.html'.format(sportsref.nba.BASE_URL, self.yr)
    awards_doc = pq(sportsref.utils.get_html(url))
    return sportsref.utils.parse_table(awards_doc('table#roy'))
def linescore(self):
    """Returns the linescore for the game as a DataFrame."""
    doc = self.get_main_doc()
    table = doc('table#line_score')
    header = table('tr.thead')
    columns = [th.text() for th in header.items('th')]
    # rename the first column to team_id
    columns[0] = 'team_id'
    rows = [
        [sportsref.utils.flatten_links(td) for td in tr('td').items()]
        for tr in header.next_all('tr').items()
    ]
    # away team row first, home team row second
    return pd.DataFrame(rows, index=['away', 'home'],
                        columns=columns, dtype='float')
def season(self):
    """
    Returns the year ID of the season in which this game took place.

    :returns: An int representing the year of the season.
    """
    played = self.date()
    # games from September onward belong to the season ending next year
    return played.year + 1 if played.month >= 9 else played.year
def _get_player_stats(self, table_id_fmt):
    """Returns a DataFrame of player stats from the game (either basic or
    advanced, depending on the argument).

    :param table_id_fmt: Format string for str.format with a placeholder
        for the team ID (e.g. 'box_{}_basic')
    :returns: DataFrame of player stats
    """
    # get data
    doc = self.get_main_doc()
    tms = self.away(), self.home()
    tm_ids = [table_id_fmt.format(tm) for tm in tms]
    tables = [doc('table#{}'.format(tm_id).lower()) for tm_id in tm_ids]
    dfs = [sportsref.utils.parse_table(table) for table in tables]
    # clean data and add features
    for i, (tm, df) in enumerate(zip(tms, dfs)):
        # zero out all non-object (numeric) stats for players with no
        # minutes played
        no_time = df['mp'] == 0
        stat_cols = [col for col, dtype in df.dtypes.items()
                     if dtype != 'object']
        df.loc[no_time, stat_cols] = 0
        df['team_id'] = tm
        df['is_home'] = i == 1  # away parsed first, home second
        # the first five rows of each team's table are the starters
        df['is_starter'] = [p < 5 for p in range(df.shape[0])]
        df.drop_duplicates(subset='player_id', keep='first', inplace=True)
    return pd.concat(dfs)
def pbp(self, dense_lineups=False, sparse_lineups=False):
    """Returns a dataframe of the play-by-play data from the game.

    :param dense_lineups: If True, adds 10 columns containing the names of
        the players on the court. Defaults to False.
    :param sparse_lineups: If True, adds binary columns denoting whether a
        given player is in the game at the time of a pass. Defaults to
        False.
    :returns: pandas DataFrame of play-by-play. Similar to GPF.
    """
    try:
        doc = self.get_subpage_doc('pbp')
    except:  # NOTE(review): bare except also swallows KeyboardInterrupt
        raise ValueError(
            'Error fetching PBP subpage for boxscore {}'
            .format(self.boxscore_id)
        )
    table = doc('table#pbp')
    # keep regular data rows plus quarter-boundary rows (id like 'q1')
    trs = [
        tr for tr in table('tr').items()
        if (not tr.attr['class'] or  # regular data rows
            tr.attr['id'] and tr.attr['id'].startswith('q'))  # qtr bounds
    ]
    rows = [tr.children('td') for tr in trs]
    n_rows = len(trs)
    data = []
    cur_qtr = 0
    bsid = self.boxscore_id
    for i in range(n_rows):
        tr = trs[i]
        row = rows[i]
        p = {}
        # increment cur_qtr when we hit a new quarter
        if tr.attr['id'] and tr.attr['id'].startswith('q'):
            assert int(tr.attr['id'][1:]) == cur_qtr + 1
            cur_qtr += 1
            continue
        # add time of play to entry
        t_str = row.eq(0).text()
        t_regex = r'(\d+):(\d+)\.(\d+)'
        mins, secs, tenths = map(int, re.match(t_regex, t_str).groups())
        # seconds on the game clock at the END of the current period
        # (12-minute quarters, 5-minute OT periods)
        endQ = (12 * 60 * min(cur_qtr, 4) +
                5 * 60 * (cur_qtr - 4 if cur_qtr > 4 else 0))
        secsElapsed = endQ - (60 * mins + secs + 0.1 * tenths)
        p['secs_elapsed'] = secsElapsed
        p['clock_time'] = t_str
        p['quarter'] = cur_qtr
        # handle single play description
        # ex: beginning/end of quarter, jump ball
        if row.length == 2:
            desc = row.eq(1)
            # handle jump balls
            if desc.text().lower().startswith('jump ball: '):
                p['is_jump_ball'] = True
                jb_str = sportsref.utils.flatten_links(desc)
                p.update(
                    sportsref.nba.pbp.parse_play(bsid, jb_str, None)
                )
            # ignore rows marking beginning/end of quarters
            elif (
                desc.text().lower().startswith('start of ') or
                desc.text().lower().startswith('end of ')
            ):
                continue
            # if another case, log and continue
            else:
                if not desc.text().lower().startswith('end of '):
                    print(
                        '{}, Q{}, {} other case: {}'
                        .format(self.boxscore_id, cur_qtr,
                                t_str, desc.text())
                    )
                continue
        # handle team play description
        # ex: shot, turnover, rebound, foul, sub, etc.
        elif row.length == 6:
            aw_desc, hm_desc = row.eq(1), row.eq(5)
            is_hm_play = bool(hm_desc.text())
            desc = hm_desc if is_hm_play else aw_desc
            desc = sportsref.utils.flatten_links(desc)
            # parse the play
            new_p = sportsref.nba.pbp.parse_play(bsid, desc, is_hm_play)
            if not new_p:
                continue
            elif isinstance(new_p, list):
                # this happens when a row needs to be expanded to 2 rows;
                # ex: double personal foul -> two PF rows
                # first, update and append the first row
                orig_p = dict(p)
                p.update(new_p[0])
                data.append(p)
                # second, set up the second row to be appended below
                p = orig_p
                new_p = new_p[1]
            elif new_p.get('is_error'):
                print("can't parse: {}, boxscore: {}"
                      .format(desc, self.boxscore_id))
                # import pdb; pdb.set_trace()
            p.update(new_p)
        # otherwise, I don't know what this was
        else:
            raise Exception(("don't know how to handle row of length {}"
                             .format(row.length)))
        data.append(p)
    # convert to DataFrame and clean columns
    df = pd.DataFrame.from_records(data)
    df.sort_values('secs_elapsed', inplace=True, kind='mergesort')
    df = sportsref.nba.pbp.clean_features(df)
    # add columns for home team, away team, boxscore_id, date
    away, home = self.away(), self.home()
    df['home'] = home
    df['away'] = away
    df['boxscore_id'] = self.boxscore_id
    df['season'] = self.season()
    date = self.date()
    df['year'] = date.year
    df['month'] = date.month
    df['day'] = date.day

    def _clean_rebs(df):
        # drop bogus 'Team' rebound rows: right after a made FT /
        # non-final FTA / tech FTA, right before a final FTA, or at a
        # quarter boundary
        df.reset_index(drop=True, inplace=True)
        no_reb_after = (
            (df.fta_num < df.tot_fta) | df.is_ftm |
            df.get('is_tech_fta', False)
        ).shift(1).fillna(False)
        no_reb_before = (
            (df.fta_num == df.tot_fta)
        ).shift(-1).fillna(False)
        se_end_qtr = df.loc[
            df.clock_time == '0:00.0', 'secs_elapsed'
        ].unique()
        no_reb_when = df.secs_elapsed.isin(se_end_qtr)
        # NOTE(review): Series.nonzero() is removed in modern pandas;
        # np.nonzero(...) would be the forward-compatible spelling
        drop_mask = (
            (df.rebounder == 'Team') &
            (no_reb_after | no_reb_before | no_reb_when)
        ).nonzero()[0]
        df.drop(drop_mask, axis=0, inplace=True)
        df.reset_index(drop=True, inplace=True)
        return df

    # get rid of 'rebounds' after FTM, non-final FTA, or tech FTA
    df = _clean_rebs(df)
    # track possession number for each possession
    # TODO: see 201604130PHO, secs_elapsed == 2756
    # things that end a poss:
    # FGM, dreb, TO, end of Q, made last FT, lost jump ball,
    # def goaltending, shot clock violation
    new_poss = (df.off_team == df.home).diff().fillna(False)
    # def rebound considered part of the new possession
    df['poss_id'] = np.cumsum(new_poss) + df.is_dreb
    # create poss_id with rebs -> new possessions for granular groupbys
    poss_id_reb = np.cumsum(new_poss | df.is_reb)
    # make sure plays with the same clock time are in the right order
    # TODO: make sort_cols depend on what cols are in the play?
    # or combine related plays, like and-1 shot and foul
    # issues come up with FGA after timeout in 201604130LAL
    # issues come up with PF between FGA and DREB in 201604120SAS
    sort_cols = [col for col in
                 ['is_reb', 'is_fga', 'is_pf', 'is_tech_foul',
                  'is_ejection', 'is_tech_fta', 'is_timeout', 'is_pf_fta',
                  'fta_num', 'is_viol', 'is_to', 'is_jump_ball', 'is_sub']
                 if col in df.columns]
    asc_true = ['fta_num']
    ascend = [(col in asc_true) for col in sort_cols]
    for label, group in df.groupby([df.secs_elapsed, poss_id_reb]):
        if len(group) > 1:
            df.loc[group.index, :] = group.sort_values(
                sort_cols, ascending=ascend, kind='mergesort'
            ).values
    # 2nd pass: get rid of 'rebounds' after FTM, non-final FTA, etc.
    df = _clean_rebs(df)
    # makes sure off/def and poss_id are correct for subs after rearranging
    # some possessions above
    df.loc[df['is_sub'], ['off_team', 'def_team', 'poss_id']] = np.nan
    df.off_team.fillna(method='bfill', inplace=True)
    df.def_team.fillna(method='bfill', inplace=True)
    df.poss_id.fillna(method='bfill', inplace=True)
    # make off_team and def_team NaN for jump balls
    if 'is_jump_ball' in df.columns:
        df.loc[df['is_jump_ball'], ['off_team', 'def_team']] = np.nan
    # make sure 'off_team' is always the team shooting FTs, even on techs
    # (impt for keeping track of the score)
    if 'is_tech_fta' in df.columns:
        tech_fta = df['is_tech_fta']
        df.loc[tech_fta, 'off_team'] = df.loc[tech_fta, 'fta_team']
        df.loc[tech_fta, 'def_team'] = np.where(
            df.loc[tech_fta, 'off_team'] == home, away, home
        )
        df.drop('fta_team', axis=1, inplace=True)
    # redefine poss_id_reb
    new_poss = (df.off_team == df.home).diff().fillna(False)
    poss_id_reb = np.cumsum(new_poss | df.is_reb)
    # get rid of redundant subs
    for (se, tm, pnum), group in df[df.is_sub].groupby(
        [df.secs_elapsed, df.sub_team, poss_id_reb]
    ):
        if len(group) > 1:
            sub_in = set()
            sub_out = set()
            # first, figure out who's in and who's out after subs
            for i, row in group.iterrows():
                if row['sub_in'] in sub_out:
                    sub_out.remove(row['sub_in'])
                else:
                    sub_in.add(row['sub_in'])
                if row['sub_out'] in sub_in:
                    sub_in.remove(row['sub_out'])
                else:
                    sub_out.add(row['sub_out'])
            assert len(sub_in) == len(sub_out)
            # second, add those subs
            n_subs = len(sub_in)
            for idx, p_in, p_out in zip(
                group.index[:n_subs], sub_in, sub_out
            ):
                assert df.loc[idx, 'is_sub']
                df.loc[idx, 'sub_in'] = p_in
                df.loc[idx, 'sub_out'] = p_out
                df.loc[idx, 'sub_team'] = tm
                df.loc[idx, 'detail'] = (
                    '{} enters the game for {}'.format(p_in, p_out)
                )
            # third, if applicable, remove old sub entries when there are
            # redundant subs
            n_extra = len(group) - len(sub_in)
            if n_extra:
                extra_idxs = group.index[-n_extra:]
                df.drop(extra_idxs, axis=0, inplace=True)
    df.reset_index(drop=True, inplace=True)
    # add column for pts and score (three-pointers add 1 on top of the 2)
    df['pts'] = (df['is_ftm'] + 2 * df['is_fgm'] +
                 (df['is_fgm'] & df['is_three']))
    df['hm_pts'] = np.where(df.off_team == df.home, df.pts, 0)
    df['aw_pts'] = np.where(df.off_team == df.away, df.pts, 0)
    df['hm_score'] = np.cumsum(df['hm_pts'])
    df['aw_score'] = np.cumsum(df['aw_pts'])
    # more helpful columns
    # "play" is differentiated from "poss" by counting OReb as new play
    # "plays" end with non-and1 FGA, TO, last non-tech FTA, or end of qtr
    # (or double lane viol)
    new_qtr = df.quarter.diff().shift(-1).fillna(False).astype(bool)
    and1 = (df.is_fgm & df.is_pf.shift(-1).fillna(False) &
            df.is_fta.shift(-2).fillna(False) &
            ~df.secs_elapsed.diff().shift(-1).fillna(False).astype(bool))
    double_lane = (df.get('viol_type') == 'double lane')
    new_play = df.eval('(is_fga & ~(@and1)) | is_to | @new_qtr |'
                       '(is_fta & ~is_tech_fta & fta_num == tot_fta) |'
                       '@double_lane')
    df['play_id'] = np.cumsum(new_play).shift(1).fillna(0)
    df['hm_off'] = df.off_team == df.home
    # get lineup data
    if dense_lineups:
        df = pd.concat(
            (df, sportsref.nba.pbp.get_dense_lineups(df)), axis=1
        )
    if sparse_lineups:
        df = pd.concat(
            (df, sportsref.nba.pbp.get_sparse_lineups(df)), axis=1
        )
    # TODO: add shot clock as a feature
    return df
def switch_to_dir(dirPath):
    """
    Decorator that switches to given directory before executing function, and
    then returning to original directory.
    """
    def decorator(func):
        @funcutils.wraps(func)
        def wrapper(*args, **kwargs):
            orig_cwd = os.getcwd()
            os.chdir(dirPath)
            try:
                return func(*args, **kwargs)
            finally:
                # restore the original cwd even if func raises;
                # the original version leaked the chdir on exceptions
                os.chdir(orig_cwd)
        return wrapper
    return decorator
def cache(func):
    """Caches the HTML returned by the specified function `func`. Caches it in
    the user cache determined by the appdirs package.
    """
    # one cache directory shared by all wrapped functions
    CACHE_DIR = appdirs.user_cache_dir('sportsref', getpass.getuser())
    if not os.path.isdir(CACHE_DIR):
        os.makedirs(CACHE_DIR)

    @funcutils.wraps(func)
    def wrapper(url):
        # hash based on the URL
        file_hash = hashlib.md5()
        encoded_url = url.encode(errors='replace')
        file_hash.update(encoded_url)
        file_hash = file_hash.hexdigest()
        filename = '{}/{}'.format(CACHE_DIR, file_hash)
        # identify the sport from the URL prefix; it selects the
        # per-sport cache-expiry function below
        sport_id = None
        for a_base_url, a_sport_id in sportsref.SITE_ABBREV.items():
            if url.startswith(a_base_url):
                sport_id = a_sport_id
                break
        else:
            print('No sport ID found for {}, not able to check cache'.format(url))
        # check whether cache is valid or stale
        file_exists = os.path.isfile(filename)
        if sport_id and file_exists:
            cur_time = int(time.time())
            mod_time = int(os.path.getmtime(filename))
            days_since_mod = datetime.timedelta(seconds=(cur_time - mod_time)).days
            # dispatch to a module-level helper, e.g. _days_valid_nba(url)
            days_cache_valid = globals()['_days_valid_{}'.format(sport_id)](url)
            cache_is_valid = days_since_mod < days_cache_valid
        else:
            cache_is_valid = False
        # if file found and cache is valid, read from file
        allow_caching = sportsref.get_option('cache')
        if file_exists and cache_is_valid and allow_caching:
            with codecs.open(filename, 'r', encoding='utf-8', errors='replace') as f:
                text = f.read()
        # otherwise, execute function and cache results
        else:
            text = func(url)
            with codecs.open(filename, 'w+', encoding='utf-8') as f:
                f.write(text)
        return text
    return wrapper
def get_class_instance_key(cls, args, kwargs):
    """
    Returns a unique, hashable identifier for a class instantiation.

    :cls: the class being instantiated.
    :args: tuple of positional arguments.
    :kwargs: dict of keyword arguments.
    :returns: a tuple usable as a dictionary key.
    """
    # NOTE: the original did `tuple(sorted(l))` over a mix of ints (ids)
    # and (name, id) tuples, which raises TypeError on Python 3 whenever
    # kwargs are present, and also collapsed positional-arg order.
    # Keep positional ids in call order and sort only kwargs by name.
    key = [id(cls)]
    key.extend(id(arg) for arg in args)
    key.extend((k, id(v)) for k, v in sorted(kwargs.items()))
    return tuple(key)
def memoize(fun):
    """A decorator for memoizing functions.

    Only works on functions that take simple arguments - arguments that take
    list-like or dict-like arguments will not be memoized, and this function
    will raise a TypeError.
    """
    @funcutils.wraps(fun)
    def wrapper(*args, **kwargs):
        # memoization can be disabled globally via the 'memoize' option
        do_memoization = sportsref.get_option('memoize')
        if not do_memoization:
            return fun(*args, **kwargs)
        hash_args = tuple(args)
        hash_kwargs = frozenset(sorted(kwargs.items()))
        key = (hash_args, hash_kwargs)

        def _copy(v):
            # return a defensive copy so callers can't mutate cached
            # values; PyQuery objects need clone() instead of deepcopy
            if isinstance(v, pq):
                return v.clone()
            else:
                return copy.deepcopy(v)

        try:
            ret = _copy(cache[key])
            return ret
        except KeyError:
            # first call with these arguments: compute and store
            cache[key] = fun(*args, **kwargs)
            ret = _copy(cache[key])
            return ret
        except TypeError:
            # unhashable argument(s) -- per the docstring, re-raise
            print('memoization type error in function {} for arguments {}'
                  .format(fun.__name__, key))
            raise

    # closed-over cache dict; one per decorated function
    cache = {}
    return wrapper
def age(self, year, month=2, day=1):
    """Returns the age of the player on a given date.

    :year: int representing the year.
    :month: int representing the month (1-12).
    :day: int representing the day within the month (1-31).
    :returns: Age in years as a float.
    """
    doc = self.get_main_doc()
    birth_string = doc('span[itemprop="birthDate"]').attr('data-birth')
    y, m, d = map(int, re.match(r'(\d{4})\-(\d{2})\-(\d{2})',
                                birth_string).groups())
    born = datetime.date(y, m, d)
    target = datetime.date(year=year, month=month, day=day)
    # fractional years as elapsed days / 365 (leap days not special-cased)
    return (target - born).days / 365.
def height(self):
    """Returns the player's height (in inches).

    :returns: An int representing a player's height in inches, or None if
    the listed height cannot be parsed.
    """
    raw = self.get_main_doc()('span[itemprop="height"]').text()
    try:
        # listed as feet-inches, e.g. '6-7'
        feet, inches = (int(part) for part in raw.split('-'))
    except ValueError:
        return None
    return feet * 12 + inches
def weight(self):
    """Returns the player's weight (in pounds).

    :returns: An int representing a player's weight in pounds, or None when
        the weight is missing or unparseable.
    """
    doc = self.get_main_doc()
    raw = doc('span[itemprop="weight"]').text()
    # re.match returns None when the text doesn't look like "NNNlb" (e.g.
    # the span is absent and text() is ''); the old code only caught
    # ValueError, so .group(1) raised an uncaught AttributeError there.
    match = re.match(r'(\d+)lb', raw)
    if match is None:
        return None
    return int(match.group(1))
def hand(self):
    """Returns the player's handedness (shooting hand).

    :returns: 'L' for left-handed, 'R' for right-handed, or None when the
        page does not list a shooting hand.
    """
    doc = self.get_main_doc()
    match = re.search(r'Shoots:\s*(L|R)', doc.text())
    # some pages omit the "Shoots:" line; previously this raised
    # AttributeError on .group(1) instead of degrading gracefully like the
    # sibling height()/weight() methods
    return match.group(1) if match else None
def draft_pick(self):
    """Returns when in the draft the player was picked.

    :returns: An int giving the player's overall draft-pick number, or None
        when the player was undrafted or the draft line can't be parsed.
    """
    doc = self.get_main_doc()
    try:
        p_tags = doc('div#meta p')
        draft_p_tag = next(p for p in p_tags.items()
                           if p.text().lower().startswith('draft'))
        # e.g. "... 19th overall ..." -> 19
        return int(re.search(r'(\d+)\w{,3}\s+?overall',
                             draft_p_tag.text()).group(1))
    # catch only what a missing/unparseable draft line can raise
    # (StopIteration from next(), AttributeError from a failed re.search)
    # instead of the old blanket `except Exception as e` with an unused `e`
    except (StopIteration, AttributeError, ValueError):
        return None
def _get_stats_table(self, table_id, kind='R', summary=False):
    """Gets a stats table from the player page; helper function that does
    the work for per-game, per-100-poss, etc. stats.

    :table_id: the ID of the HTML table.
    :kind: specifies regular season, playoffs, or both. One of 'R', 'P',
        'B'. Defaults to 'R'.
    :summary: if True, parse the footer/summary row(s) instead of the
        flattened per-season rows.
    :returns: A DataFrame of stats.
    """
    doc = self.get_main_doc()
    # playoff tables carry a 'playoffs_' prefix on their HTML id
    prefix = 'playoffs_' if kind == 'P' else ''
    selector = 'table#{}{}'.format(prefix, table_id)
    return sportsref.utils.parse_table(
        doc(selector), flatten=(not summary), footer=summary
    )
def stats_per_game(self, kind='R', summary=False):
    """Returns the player's per-game box score stats as a DataFrame."""
    return self._get_stats_table('per_game', kind, summary)
def stats_totals(self, kind='R', summary=False):
    """Returns the player's total box score statistics by season as a DataFrame."""
    return self._get_stats_table('totals', kind, summary)
def stats_per36(self, kind='R', summary=False):
    """Returns the player's per-36-minutes stats as a DataFrame."""
    return self._get_stats_table('per_minute', kind, summary)
def stats_per100(self, kind='R', summary=False):
    """Returns the player's per-100-possession stats as a DataFrame."""
    return self._get_stats_table('per_poss', kind, summary)
def stats_advanced(self, kind='R', summary=False):
    """Returns the player's advanced stats as a DataFrame."""
    return self._get_stats_table('advanced', kind, summary)
def stats_shooting(self, kind='R', summary=False):
    """Returns the player's shooting stats as a DataFrame."""
    return self._get_stats_table('shooting', kind, summary)
def stats_pbp(self, kind='R', summary=False):
    """Returns the player's play-by-play stats as a DataFrame."""
    return self._get_stats_table('advanced_pbp', kind, summary)
def gamelog_basic(self, year, kind='R'):
    """Returns a table of a player's basic game-by-game stats for a season.

    :param year: The year representing the desired season.
    :param kind: specifies regular season, playoffs, or both. One of 'R',
        'P', 'B'. Defaults to 'R'.
    :returns: A DataFrame of the player's standard boxscore stats from each
        game of the season.
    :rtype: pd.DataFrame
    """
    doc = self.get_sub_doc('gamelog/{}'.format(year))
    selector = ('table#pgl_basic_playoffs' if kind == 'P'
                else 'table#pgl_basic')
    return sportsref.utils.parse_table(doc(selector))
def parse_play(boxscore_id, details, is_hm):
    """Parse play details from a play-by-play string describing a play.

    Assuming valid input, this function returns structured data in a
    dictionary describing the play. If the play detail string was invalid,
    this function returns None.

    The detail string is matched against a sequence of anchored regexes
    (shots, jump balls, rebounds, free throws, subs, turnovers, fouls,
    timeouts, technicals, ejections, violations); the first match wins and
    determines which keys appear in the returned dict. If nothing matches,
    the dict is returned with ``is_error`` set instead of returning None,
    so the raw detail string is preserved downstream.

    :param boxscore_id: the boxscore ID of the play
    :param details: detail string for the play
    :param is_hm: bool indicating whether the offense is at home
    :returns: dictionary of play attributes or None if invalid
    :rtype: dictionary or None
    """
    # if input isn't a string, return None
    if not details or not isinstance(details, basestring):
        return None
    bs = sportsref.nba.BoxScore(boxscore_id)
    aw, hm = bs.away(), bs.home()
    season = sportsref.nba.Season(bs.season())
    # home-team player IDs; used throughout to attribute a play to a team
    hm_roster = set(bs.basic_stats().query('is_home == True').player_id.values)
    p = {}
    p['detail'] = details
    p['home'] = hm
    p['away'] = aw
    p['is_home_play'] = is_hm
    # parsing field goal attempts
    # NOTE(review): the group is named `is_three` but captures the "2|3"
    # point value; it is converted to a bool further down. The second
    # fragment below is not a raw string ('\-' relies on Python keeping
    # unknown escapes literal).
    shotRE = (r'(?P<shooter>{0}) (?P<is_fgm>makes|misses) '
              '(?P<is_three>2|3)\-pt shot').format(PLAYER_RE)
    distRE = r' (?:from (?P<shot_dist>\d+) ft|at rim)'
    assistRE = r' \(assist by (?P<assister>{0})\)'.format(PLAYER_RE)
    blockRE = r' \(block by (?P<blocker>{0})\)'.format(PLAYER_RE)
    shotRE = r'{0}{1}(?:{2}|{3})?'.format(shotRE, distRE, assistRE, blockRE)
    m = re.match(shotRE, details, re.IGNORECASE)
    if m:
        p['is_fga'] = True
        p.update(m.groupdict())
        # "at rim" shots have no distance group; treat them as 0 ft
        p['shot_dist'] = p['shot_dist'] if p['shot_dist'] is not None else 0
        p['shot_dist'] = int(p['shot_dist'])
        p['is_fgm'] = p['is_fgm'] == 'makes'
        p['is_three'] = p['is_three'] == '3'
        p['is_assist'] = pd.notnull(p.get('assister'))
        p['is_block'] = pd.notnull(p.get('blocker'))
        shooter_home = p['shooter'] in hm_roster
        p['off_team'] = hm if shooter_home else aw
        p['def_team'] = aw if shooter_home else hm
        return p
    # parsing jump balls
    jumpRE = ((r'Jump ball: (?P<away_jumper>{0}) vs\. (?P<home_jumper>{0})'
               r'(?: \((?P<gains_poss>{0}) gains possession\))?')
              .format(PLAYER_RE))
    m = re.match(jumpRE, details, re.IGNORECASE)
    if m:
        p['is_jump_ball'] = True
        p.update(m.groupdict())
        return p
    # parsing rebounds
    rebRE = (r'(?P<is_oreb>Offensive|Defensive) rebound'
             r' by (?P<rebounder>{0}|Team)').format(PLAYER_RE)
    m = re.match(rebRE, details, re.I)
    if m:
        p['is_reb'] = True
        p.update(m.groupdict())
        p['is_oreb'] = p['is_oreb'].lower() == 'offensive'
        p['is_dreb'] = not p['is_oreb']
        # team rebounds are attributed based on which side's log the play is
        # on (is_hm); player rebounds based on roster membership
        if p['rebounder'] == 'Team':
            p['reb_team'], other = (hm, aw) if is_hm else (aw, hm)
        else:
            reb_home = p['rebounder'] in hm_roster
            p['reb_team'], other = (hm, aw) if reb_home else (aw, hm)
        p['off_team'] = p['reb_team'] if p['is_oreb'] else other
        p['def_team'] = p['reb_team'] if p['is_dreb'] else other
        return p
    # parsing free throws
    ftRE = (r'(?P<ft_shooter>{}) (?P<is_ftm>makes|misses) '
            r'(?P<is_tech_fta>technical )?(?P<is_flag_fta>flagrant )?'
            r'(?P<is_clearpath_fta>clear path )?free throw'
            r'(?: (?P<fta_num>\d+) of (?P<tot_fta>\d+))?').format(PLAYER_RE)
    m = re.match(ftRE, details, re.I)
    if m:
        p['is_fta'] = True
        p.update(m.groupdict())
        p['is_ftm'] = p['is_ftm'] == 'makes'
        p['is_tech_fta'] = bool(p['is_tech_fta'])
        p['is_flag_fta'] = bool(p['is_flag_fta'])
        p['is_clearpath_fta'] = bool(p['is_clearpath_fta'])
        # any non-technical FT is the result of a personal foul
        p['is_pf_fta'] = not p['is_tech_fta']
        if p['tot_fta']:
            p['tot_fta'] = int(p['tot_fta'])
        if p['fta_num']:
            p['fta_num'] = int(p['fta_num'])
        ft_home = p['ft_shooter'] in hm_roster
        p['fta_team'] = hm if ft_home else aw
        # technical FTs don't identify which team is on offense
        if not p['is_tech_fta']:
            p['off_team'] = hm if ft_home else aw
            p['def_team'] = aw if ft_home else hm
        return p
    # parsing substitutions
    subRE = (r'(?P<sub_in>{0}) enters the game for '
             r'(?P<sub_out>{0})').format(PLAYER_RE)
    m = re.match(subRE, details, re.I)
    if m:
        p['is_sub'] = True
        p.update(m.groupdict())
        sub_home = p['sub_in'] in hm_roster or p['sub_out'] in hm_roster
        p['sub_team'] = hm if sub_home else aw
        return p
    # parsing turnovers
    toReasons = (r'(?P<to_type>[^;]+)(?:; steal by '
                 r'(?P<stealer>{0}))?').format(PLAYER_RE)
    toRE = (r'Turnover by (?P<to_by>{}|Team) '
            r'\((?:{})\)').format(PLAYER_RE, toReasons)
    m = re.match(toRE, details, re.I)
    if m:
        p['is_to'] = True
        p.update(m.groupdict())
        p['to_type'] = p['to_type'].lower()
        # "Turnover by X (offensive foul)" presumably duplicates the
        # separate "Offensive foul by ..." event (handled below), so it is
        # dropped here to avoid double counting -- TODO confirm
        if p['to_type'] == 'offensive foul':
            return None
        p['is_steal'] = pd.notnull(p['stealer'])
        p['is_travel'] = p['to_type'] == 'traveling'
        p['is_shot_clock_viol'] = p['to_type'] == 'shot clock'
        p['is_oob'] = p['to_type'] == 'step out of bounds'
        p['is_three_sec_viol'] = p['to_type'] == '3 sec'
        p['is_backcourt_viol'] = p['to_type'] == 'back court'
        p['is_off_goaltend'] = p['to_type'] == 'offensive goaltending'
        p['is_double_dribble'] = p['to_type'] == 'dbl dribble'
        p['is_discont_dribble'] = p['to_type'] == 'discontinued dribble'
        p['is_carry'] = p['to_type'] == 'palming'
        if p['to_by'] == 'Team':
            p['off_team'] = hm if is_hm else aw
            p['def_team'] = aw if is_hm else hm
        else:
            to_home = p['to_by'] in hm_roster
            p['off_team'] = hm if to_home else aw
            p['def_team'] = aw if to_home else hm
        return p
    # parsing shooting fouls
    shotFoulRE = (r'Shooting(?P<is_block_foul> block)? foul by (?P<fouler>{0})'
                  r'(?: \(drawn by (?P<drew_foul>{0})\))?').format(PLAYER_RE)
    m = re.match(shotFoulRE, details, re.I)
    if m:
        p['is_pf'] = True
        p['is_shot_foul'] = True
        p.update(m.groupdict())
        p['is_block_foul'] = bool(p['is_block_foul'])
        # the fouling team is on defense for a shooting foul
        foul_on_home = p['fouler'] in hm_roster
        p['off_team'] = aw if foul_on_home else hm
        p['def_team'] = hm if foul_on_home else aw
        p['foul_team'] = p['def_team']
        return p
    # parsing offensive fouls
    offFoulRE = (r'Offensive(?P<is_charge> charge)? foul '
                 r'by (?P<to_by>{0})'
                 r'(?: \(drawn by (?P<drew_foul>{0})\))?').format(PLAYER_RE)
    m = re.match(offFoulRE, details, re.I)
    if m:
        p['is_pf'] = True
        p['is_off_foul'] = True
        # an offensive foul also counts as a turnover by the fouler
        p['is_to'] = True
        p['to_type'] = 'offensive foul'
        p.update(m.groupdict())
        p['is_charge'] = bool(p['is_charge'])
        p['fouler'] = p['to_by']
        foul_on_home = p['fouler'] in hm_roster
        p['off_team'] = hm if foul_on_home else aw
        p['def_team'] = aw if foul_on_home else hm
        p['foul_team'] = p['off_team']
        return p
    # parsing personal fouls
    foulRE = (r'Personal (?P<is_take_foul>take )?(?P<is_block_foul>block )?'
              r'foul by (?P<fouler>{0})(?: \(drawn by '
              r'(?P<drew_foul>{0})\))?').format(PLAYER_RE)
    m = re.match(foulRE, details, re.I)
    if m:
        p['is_pf'] = True
        p.update(m.groupdict())
        p['is_take_foul'] = bool(p['is_take_foul'])
        p['is_block_foul'] = bool(p['is_block_foul'])
        foul_on_home = p['fouler'] in hm_roster
        p['off_team'] = aw if foul_on_home else hm
        p['def_team'] = hm if foul_on_home else aw
        p['foul_team'] = p['def_team']
        return p
    # TODO: parsing double personal fouls
    # double_foul_re = (r'Double personal foul by (?P<fouler1>{0}) and '
    #                   r'(?P<fouler2>{0})').format(PLAYER_RE)
    # m = re.match(double_Foul_re, details, re.I)
    # if m:
    #     p['is_pf'] = True
    #     p.update(m.groupdict())
    #     p['off_team'] =
    # parsing loose ball fouls
    looseBallRE = (r'Loose ball foul by (?P<fouler>{0})'
                   r'(?: \(drawn by (?P<drew_foul>{0})\))?').format(PLAYER_RE)
    m = re.match(looseBallRE, details, re.I)
    if m:
        p['is_pf'] = True
        p['is_loose_ball_foul'] = True
        p.update(m.groupdict())
        # loose-ball fouls don't identify an offense/defense
        foul_home = p['fouler'] in hm_roster
        p['foul_team'] = hm if foul_home else aw
        return p
    # parsing punching fouls
    # TODO
    # parsing away from play fouls
    awayFromBallRE = ((r'Away from play foul by (?P<fouler>{0})'
                       r'(?: \(drawn by (?P<drew_foul>{0})\))?')
                      .format(PLAYER_RE))
    m = re.match(awayFromBallRE, details, re.I)
    if m:
        p['is_pf'] = True
        p['is_away_from_play_foul'] = True
        p.update(m.groupdict())
        foul_on_home = p['fouler'] in hm_roster
        # TODO: figure out who had the ball based on previous play
        p['foul_team'] = hm if foul_on_home else aw
        return p
    # parsing inbound fouls
    inboundRE = (r'Inbound foul by (?P<fouler>{0})'
                 r'(?: \(drawn by (?P<drew_foul>{0})\))?').format(PLAYER_RE)
    m = re.match(inboundRE, details, re.I)
    if m:
        p['is_pf'] = True
        p['is_inbound_foul'] = True
        p.update(m.groupdict())
        # an inbound foul is committed by the defense
        foul_on_home = p['fouler'] in hm_roster
        p['off_team'] = aw if foul_on_home else hm
        p['def_team'] = hm if foul_on_home else aw
        p['foul_team'] = p['def_team']
        return p
    # parsing flagrant fouls
    flagrantRE = (r'Flagrant foul type (?P<flag_type>1|2) by (?P<fouler>{0})'
                  r'(?: \(drawn by (?P<drew_foul>{0})\))?').format(PLAYER_RE)
    m = re.match(flagrantRE, details, re.I)
    if m:
        p['is_pf'] = True
        p['is_flagrant'] = True
        p.update(m.groupdict())
        foul_on_home = p['fouler'] in hm_roster
        p['foul_team'] = hm if foul_on_home else aw
        return p
    # parsing clear path fouls
    clearPathRE = (r'Clear path foul by (?P<fouler>{0})'
                   r'(?: \(drawn by (?P<drew_foul>{0})\))?').format(PLAYER_RE)
    m = re.match(clearPathRE, details, re.I)
    if m:
        p['is_pf'] = True
        p['is_clear_path_foul'] = True
        p.update(m.groupdict())
        foul_on_home = p['fouler'] in hm_roster
        p['off_team'] = aw if foul_on_home else hm
        p['def_team'] = hm if foul_on_home else aw
        p['foul_team'] = p['def_team']
        return p
    # parsing timeouts
    timeoutRE = r'(?P<timeout_team>.*?) (?:full )?timeout'
    m = re.match(timeoutRE, details, re.I)
    if m:
        p['is_timeout'] = True
        p.update(m.groupdict())
        isOfficialTO = p['timeout_team'].lower() == 'official'
        name_to_id = season.team_names_to_ids()
        # NOTE(review): hm/aw are team IDs, but name_to_id maps team *names*
        # to IDs, so these lookups presumably always miss and the raw
        # timeout_team string is kept as-is -- verify whether the intent was
        # name_to_id.get(p['timeout_team'], ...).
        p['timeout_team'] = (
            'Official' if isOfficialTO else
            name_to_id.get(hm, name_to_id.get(aw, p['timeout_team']))
        )
        return p
    # parsing technical fouls
    techRE = (r'(?P<is_hanging>Hanging )?'
              r'(?P<is_taunting>Taunting )?'
              r'(?P<is_ill_def>Ill def )?'
              r'(?P<is_delay>Delay )?'
              r'(?P<is_unsport>Non unsport )?'
              r'tech(?:nical)? foul by '
              r'(?P<tech_fouler>{0}|Team)').format(PLAYER_RE)
    m = re.match(techRE, details, re.I)
    if m:
        p['is_tech_foul'] = True
        p.update(m.groupdict())
        p['is_hanging'] = bool(p['is_hanging'])
        p['is_taunting'] = bool(p['is_taunting'])
        p['is_ill_def'] = bool(p['is_ill_def'])
        p['is_delay'] = bool(p['is_delay'])
        p['is_unsport'] = bool(p['is_unsport'])
        foul_on_home = p['tech_fouler'] in hm_roster
        p['foul_team'] = hm if foul_on_home else aw
        return p
    # parsing ejections
    ejectRE = r'(?P<ejectee>{0}|Team) ejected from game'.format(PLAYER_RE)
    m = re.match(ejectRE, details, re.I)
    if m:
        p['is_ejection'] = True
        p.update(m.groupdict())
        if p['ejectee'] == 'Team':
            p['ejectee_team'] = hm if is_hm else aw
        else:
            eject_home = p['ejectee'] in hm_roster
            p['ejectee_team'] = hm if eject_home else aw
        return p
    # parsing defensive 3 seconds techs
    def3TechRE = (r'(?:Def 3 sec tech foul|Defensive three seconds)'
                  r' by (?P<tech_fouler>{})').format(PLAYER_RE)
    m = re.match(def3TechRE, details, re.I)
    if m:
        p['is_tech_foul'] = True
        p['is_def_three_secs'] = True
        p.update(m.groupdict())
        foul_on_home = p['tech_fouler'] in hm_roster
        p['off_team'] = aw if foul_on_home else hm
        p['def_team'] = hm if foul_on_home else aw
        p['foul_team'] = p['def_team']
        return p
    # parsing violations
    violRE = (r'Violation by (?P<violator>{0}|Team) '
              r'\((?P<viol_type>.*)\)').format(PLAYER_RE)
    m = re.match(violRE, details, re.I)
    if m:
        p['is_viol'] = True
        p.update(m.groupdict())
        # kicked-ball violations also count as turnovers
        if p['viol_type'] == 'kicked_ball':
            p['is_to'] = True
            p['to_by'] = p['violator']
        if p['violator'] == 'Team':
            p['viol_team'] = hm if is_hm else aw
        else:
            viol_home = p['violator'] in hm_roster
            p['viol_team'] = hm if viol_home else aw
        return p
    # nothing matched: flag the play as unparseable but keep the raw detail
    p['is_error'] = True
    return p
def clean_features(df):
    """Fixes up columns of the passed DataFrame, such as casting T/F columns
    to boolean and filling in NaNs for team and opp.

    Indicator columns become bool (NaN -> False), sparse lineup columns get
    NaN -> 0, technical free throws get their FT counters set to 1, and
    off_team/def_team NaNs are filled from neighboring plays.

    :param df: DataFrame of play-by-play data.
    :returns: DataFrame with cleaned columns.
    """
    df = pd.DataFrame(df)
    booleans = set([True, False, None, np.nan])
    lineup_cols = sparse_lineup_cols(df)
    for column in df:
        # indicator columns become bool dtype (NaNs read as False)
        if set(df[column].unique()[:5]) <= booleans:
            df[column] = (df[column] == True)
        # sparse lineup columns use 0 instead of NaN
        elif column in lineup_cols:
            df[column] = df[column].fillna(0)
    # technical FTs are always "1 of 1"
    df.loc[df.is_tech_fta, ['fta_num', 'tot_fta']] = 1
    # fill off_team/def_team gaps from surrounding plays (backward first,
    # then forward for any trailing NaNs)
    for team_col in ('off_team', 'def_team'):
        df[team_col].fillna(method='bfill', inplace=True)
        df[team_col].fillna(method='ffill', inplace=True)
    return df
def clean_multigame_features(df):
    """Cleans a play-by-play DataFrame that may span multiple games.

    Applies :func:`clean_features` and, when several games are concatenated,
    rewrites ``play_id`` and ``poss_id`` so they increase monotonically
    across the whole DataFrame instead of restarting at each game.

    :df: DataFrame of play-by-play data, possibly from multiple games.
    :returns: The cleaned DataFrame.
    """
    df = pd.DataFrame(df)
    # duplicated index labels mean concatenated games; flatten the index
    if df.index.value_counts().max() > 1:
        df.reset_index(drop=True, inplace=True)
    df = clean_features(df)
    # if it's many games in one DataFrame, make poss_id and play_id unique
    for id_col in ('play_id', 'poss_id'):
        deltas = df[id_col].diff().fillna(0)
        # a negative jump marks where a new game's numbering restarts
        if (deltas < 0).any():
            replacement = np.cumsum(deltas.astype(bool))
            df.eval('{} = @replacement'.format(id_col), inplace=True)
    return df
def get_period_starters(df):
    """Determines each period's starting lineups from play-by-play data.

    Walks the game's plays in order and attributes every player mentioned
    in a play (before being substituted in) to that period's starters.

    :param df: DataFrame of one game's play-by-play data (needs 'quarter',
        'secs_elapsed', 'sub_in', 'boxscore_id', and parsed play columns).
    :returns: A list of (away_starters, home_starters) pairs of sets, one
        per period.
    """
    def players_from_play(play):
        """Figures out what players are in the game based on the players
        mentioned in a play. Returns away and home players as two sets.

        :param play: A dictionary representing a parsed play.
        :returns: (aw_players, hm_players)
        :rtype: tuple of lists
        """
        # if it's a tech FT from between periods, don't count this play
        if (
            play['clock_time'] == '12:00.0' and
            (play.get('is_tech_foul') or play.get('is_tech_fta'))
        ):
            return [], []
        stats = sportsref.nba.BoxScore(play['boxscore_id']).basic_stats()
        home_grouped = stats.groupby('is_home')
        hm_roster = set(home_grouped.player_id.get_group(True).values)
        aw_roster = set(home_grouped.player_id.get_group(False).values)
        # every column that can name a player in a parsed play
        player_keys = [
            'assister', 'away_jumper', 'blocker', 'drew_foul', 'fouler',
            'ft_shooter', 'gains_poss', 'home_jumper', 'rebounder', 'shooter',
            'stealer', 'sub_in', 'sub_out', 'to_by'
        ]
        players = [p for p in play[player_keys] if pd.notnull(p)]
        aw_players = [p for p in players if p in aw_roster]
        hm_players = [p for p in players if p in hm_roster]
        return aw_players, hm_players

    # create a mapping { quarter => (away_starters, home_starters) }
    n_periods = df.quarter.nunique()
    period_starters = [(set(), set()) for _ in range(n_periods)]
    # fill out this mapping quarter by quarter
    for qtr, qtr_grp in df.groupby(df.quarter):
        aw_starters, hm_starters = period_starters[qtr-1]
        exclude = set()
        # loop through sets of plays that happen at the "same time"
        for _, time_grp in qtr_grp.groupby(qtr_grp.secs_elapsed):
            # first, if they sub in and weren't already starters, exclude them
            sub_ins = set(time_grp.sub_in.dropna().values)
            exclude.update(sub_ins - aw_starters - hm_starters)
            # second, figure out new starters from each play at this time
            for _, row in time_grp.iterrows():
                aw_players, hm_players = players_from_play(row)
                # update overall sets for the quarter
                aw_starters.update(aw_players)
                hm_starters.update(hm_players)
            # remove excluded (subbed-in) players
            hm_starters -= exclude
            aw_starters -= exclude
            # a team can never have more than 5 starters; warn instead of
            # dropping into a debugger (the old code called ipdb.set_trace()
            # here, which crashes in production when ipdb isn't installed)
            if len(hm_starters) > 5 or len(aw_starters) > 5:
                print('WARNING: too many starters found in Q{} of {}'
                      .format(qtr, df.boxscore_id.iloc[0]))
            # stop early once both teams' starters are identified
            if len(hm_starters) >= 5 and len(aw_starters) >= 5:
                break
        if len(hm_starters) != 5 or len(aw_starters) != 5:
            print('WARNING: wrong number of starters for a team in Q{} of {}'
                  .format(qtr, df.boxscore_id.iloc[0]))
    return period_starters
def get_sparse_lineups(df):
    """Returns a sparse representation of the lineup on the court per play.

    The result has one '{player_id}_in' column per player: +1 when that
    player is on the court for the home team, -1 for the away team, and 0
    when the player is off the court.

    :param df: A DataFrame of play-by-play data.
    :returns: A DataFrame of per-player on-court indicator columns.
    """
    # compute dense lineup columns unless df already carries all of them
    has_lineups = not (set(ALL_LINEUP_COLS) - set(df.columns))
    lineup_df = df[ALL_LINEUP_COLS] if has_lineups else get_dense_lineups(df)

    def indicator_rows(lineups, value):
        # one dict per play: {player_id + '_in': value} for each on-court player
        return [{'{}_in'.format(player_id): value for player_id in lineup}
                for lineup in lineups]

    # +1 for home, -1 for away
    hm_df = pd.DataFrame(
        indicator_rows(lineup_df[HM_LINEUP_COLS].values, 1), dtype=int)
    aw_df = pd.DataFrame(
        indicator_rows(lineup_df[AW_LINEUP_COLS].values, -1), dtype=int)
    return pd.concat((hm_df, aw_df), axis=1).fillna(0)
def get_dense_lineups(df):
    """Returns a new DataFrame based on the one it is passed. Specifically, it
    adds five columns for each team (ten total), where each column has the ID
    of a player on the court during the play.

    This information is figured out sequentially from the game's substitution
    data in the passed DataFrame, so the DataFrame passed as an argument must
    be from a specific BoxScore (rather than a DataFrame of non-consecutive
    plays). That is, the DataFrame must be of the form returned by
    :func:`nba.BoxScore.pbp <nba.BoxScore.pbp>`.

    .. note:: Note that the lineups reflect the teams in the game when the
        play happened, not after the play. For example, if a play is a
        substitution, the lineups for that play will be the lineups before
        the substituion occurs.

    :param df: A DataFrame of a game's play-by-play data.
    :returns: A DataFrame with additional lineup columns.
    """
    # TODO: add this precondition to documentation
    assert df['boxscore_id'].nunique() == 1

    def lineup_dict(aw_lineup, hm_lineup):
        """Returns a dictionary of lineups to be converted to columns.
        Specifically, the columns are 'aw_player1' through 'aw_player5' and
        'hm_player1' through 'hm_player5'.

        :param aw_lineup: The away team's current lineup.
        :param hm_lineup: The home team's current lineup.
        :returns: A dictionary of lineups.
        """
        return {
            '{}_player{}'.format(tm, i+1): player
            for tm, lineup in zip(['aw', 'hm'], [aw_lineup, hm_lineup])
            for i, player in enumerate(lineup)
        }

    def handle_sub(row, aw_lineup, hm_lineup):
        """Modifies the aw_lineup and hm_lineup lists based on the
        substitution that takes place in the given row."""
        assert row['is_sub']
        sub_lineup = hm_lineup if row['sub_team'] == row['home'] else aw_lineup
        try:
            # make the sub
            idx = sub_lineup.index(row['sub_out'])
            sub_lineup[idx] = row['sub_in']
        except ValueError:
            # if the sub was double-entered and it's already been executed...
            if (
                row['sub_in'] in sub_lineup
                and row['sub_out'] not in sub_lineup
            ):
                return aw_lineup, hm_lineup
            # otherwise, let's print and pretend this never happened
            print('ERROR IN SUB IN {}, Q{}, {}: {}'
                  .format(row['boxscore_id'], row['quarter'],
                          row['clock_time'], row['detail']))
            raise
        return aw_lineup, hm_lineup

    per_starters = get_period_starters(df)
    cur_qtr = 0
    aw_lineup, hm_lineup = [], []
    df = df.reset_index(drop=True)
    lineups = [{} for _ in range(df.shape[0])]
    # loop through select plays to determine lineups: only substitutions and
    # the first play of each quarter can change who is on the floor
    sub_or_per_start = df.is_sub | df.quarter.diff().astype(bool)
    for i, row in df.loc[sub_or_per_start].iterrows():
        if row['quarter'] > cur_qtr:
            # first row in a quarter
            assert row['quarter'] == cur_qtr + 1
            # first, finish up the last quarter's lineups
            if cur_qtr > 0 and not df.loc[i-1, 'is_sub']:
                lineups[i-1] = lineup_dict(aw_lineup, hm_lineup)
            # then, move on to the quarter, and enter the starting lineups
            cur_qtr += 1
            aw_lineup, hm_lineup = map(list, per_starters[cur_qtr-1])
            lineups[i] = lineup_dict(aw_lineup, hm_lineup)
            # if the first play in the quarter is a sub, handle that
            if row['is_sub']:
                aw_lineup, hm_lineup = handle_sub(row, aw_lineup, hm_lineup)
        else:
            # during the quarter
            # update lineups first then change lineups based on subs
            lineups[i] = lineup_dict(aw_lineup, hm_lineup)
            if row['is_sub']:
                aw_lineup, hm_lineup = handle_sub(row, aw_lineup, hm_lineup)
    # create and clean DataFrame
    lineup_df = pd.DataFrame(lineups)
    if lineup_df.iloc[-1].isnull().all():
        lineup_df.iloc[-1] = lineup_dict(aw_lineup, hm_lineup)
    # back-fill within each quarter so every play between tracked rows gets
    # the lineup of the next tracked play
    lineup_df = lineup_df.groupby(df.quarter).fillna(method='bfill')
    # fill in NaN's based on minutes played
    bool_mat = lineup_df.isnull()
    mask = bool_mat.any(axis=1)
    if mask.any():
        # NOTE(review): positional label 0 is valid here because df was
        # reset_index above; also this BoxScore is constructed twice (see
        # `stats` below) -- presumably cheap due to upstream memoization,
        # but worth confirming.
        bs = sportsref.nba.BoxScore(df.boxscore_id[0])
        # first, get the true minutes played from the box score
        stats = sportsref.nba.BoxScore(df.boxscore_id.iloc[0]).basic_stats()
        true_mp = pd.Series(
            stats.query('mp > 0')[['player_id', 'mp']]
            .set_index('player_id').to_dict()['mp']
        ) * 60
        # next, calculate minutes played based on the lineup data
        calc_mp = pd.Series(
            {p: (df.secs_elapsed.diff() *
                 [p in row for row in lineup_df.values]).sum()
             for p in stats.query('mp > 0').player_id.values})
        # finally, figure which players are missing minutes; only a
        # discrepancy of at least 150 seconds is considered significant
        diff = true_mp - calc_mp
        players_missing = diff.loc[diff.abs() >= 150]
        hm_roster = bs.basic_stats().query('is_home == True').player_id.values
        missing_df = pd.DataFrame(
            {'secs': players_missing.values,
             'is_home': players_missing.index.isin(hm_roster)},
            index=players_missing.index
        )
        if missing_df.empty:
            # TODO: log this as a warning (or error?)
            print('There are NaNs in the lineup data, but no players were '
                  'found to be missing significant minutes')
        else:
            for is_home, group in missing_df.groupby('is_home'):
                # NOTE(review): .item() assumes exactly one missing player
                # per team and raises otherwise -- TODO confirm
                player_id = group.index.item()
                tm_cols = (sportsref.nba.pbp.HM_LINEUP_COLS if is_home else
                           sportsref.nba.pbp.AW_LINEUP_COLS)
                row_mask = lineup_df[tm_cols].isnull().any(axis=1)
                lineup_df.loc[row_mask, tm_cols] = (
                    lineup_df.loc[row_mask, tm_cols].fillna(player_id).values
                )
    return lineup_df
def GamePlayFinder(**kwargs):
    """ Docstring will be filled in by __init__.py """
    querystring = _kwargs_to_qs(**kwargs)
    url = '{}?{}'.format(GPF_URL, querystring)
    # if verbose, print url
    if kwargs.get('verbose', False):
        print(url)
    doc = pq(utils.get_html(url))
    # parse the results table
    plays = utils.parse_table(doc('table#all_plays'))
    # split the "score" column into team and opponent scores
    if 'score' in plays.columns:
        split_scores = plays.score.apply(lambda s: s.split('-'))
        team_scores, opp_scores = zip(*split_scores)
        plays['teamScore'] = team_scores
        plays['oppScore'] = opp_scores
    # attach structured play-by-play info parsed from the description
    if 'description' in plays.columns:
        plays = pbp.expand_details(plays, detailCol='description')
    return plays
def _kwargs_to_qs(**kwargs):
    """Converts kwargs given to GPF to a querystring.

    Alias keywords (pID, tm, yr, wk, ...) are normalized to the play
    finder's canonical input names, bools become 'Y'/'N', and any keyword
    matching a form input overrides that input's default value(s).

    :returns: the querystring.
    """
    # start with defaults
    inpOptDef = inputs_options_defaults()
    opts = {
        name: dct['value']
        for name, dct in inpOptDef.items()
    }
    # clean up keys and values; iterate over a snapshot of the items because
    # the loop body deletes and inserts kwargs entries (mutating a dict
    # while iterating it raises RuntimeError on Python 3)
    for k, v in list(kwargs.items()):
        # pID, playerID => player_id
        if k.lower() in ('pid', 'playerid'):
            del kwargs[k]
            kwargs['player_id'] = v
        # player_id can accept rel URLs
        if k == 'player_id':
            if v.startswith('/players/'):
                kwargs[k] = utils.rel_url_to_id(v)
        # bool => 'Y'|'N'
        if isinstance(v, bool):
            kwargs[k] = 'Y' if v else 'N'
        # tm, team => team_id
        if k.lower() in ('tm', 'team'):
            del kwargs[k]
            kwargs['team_id'] = v
        # yr_min, yr_max => year_min, year_max
        if k.lower() in ('yr_min', 'yr_max'):
            del kwargs[k]
            if k.lower() == 'yr_min':
                kwargs['year_min'] = int(v)
            else:
                kwargs['year_max'] = int(v)
        # wk_min, wk_max => week_num_min, week_num_max
        if k.lower() in ('wk_min', 'wk_max'):
            del kwargs[k]
            if k.lower() == 'wk_min':
                kwargs['week_num_min'] = int(v)
            else:
                kwargs['week_num_max'] = int(v)
        # yr, year, yrs, years => year_min, year_max
        # NOTE: strings must be tested before the generic Iterable check --
        # strings ARE iterable, so the old order could never reach the
        # string branch and min/max'ed over individual characters of
        # comma-separated values like '2014,2015'
        if k.lower() in ('yr', 'year', 'yrs', 'years'):
            del kwargs[k]
            if isinstance(v, basestring):
                years = list(map(int, v.split(',')))
                kwargs['year_min'] = min(years)
                kwargs['year_max'] = max(years)
            elif isinstance(v, collections.Iterable):
                lst = list(v)
                kwargs['year_min'] = min(lst)
                kwargs['year_max'] = max(lst)
            else:
                kwargs['year_min'] = v
                kwargs['year_max'] = v
        # wk, week, wks, weeks => week_num_min, week_num_max
        if k.lower() in ('wk', 'week', 'wks', 'weeks'):
            del kwargs[k]
            if isinstance(v, basestring):
                weeks = list(map(int, v.split(',')))
                kwargs['week_num_min'] = min(weeks)
                kwargs['week_num_max'] = max(weeks)
            elif isinstance(v, collections.Iterable):
                lst = list(v)
                kwargs['week_num_min'] = min(lst)
                kwargs['week_num_max'] = max(lst)
            else:
                kwargs['week_num_min'] = v
                kwargs['week_num_max'] = v
        # if playoff_round defined, then turn on playoff flag (the round
        # value itself is split along with all other kwargs below)
        if k == 'playoff_round':
            kwargs['game_type'] = 'P'
    # reset values to blank for defined kwargs
    for k in kwargs:
        if k in opts:
            opts[k] = []
    # update based on kwargs
    for k, v in kwargs.items():
        # if overwriting a default, overwrite it
        if k in opts:
            # if multiple values separated by commas, split em
            if isinstance(v, basestring):
                v = v.split(',')
            elif not isinstance(v, collections.Iterable):
                v = [v]
            for val in v:
                opts[k].append(val)
    opts['request'] = [1]
    qs = '&'.join('{}={}'.format(name, val)
                  for name, vals in sorted(opts.items()) for val in vals)
    return qs
def inputs_options_defaults():
    """Handles scraping options for play finder form.

    Results are cached on disk in GPF_CONSTANTS_FILENAME for up to a week;
    after that (or when the cache file is missing) the form on the play
    finder page is re-scraped and the cache rewritten.

    :returns: {'name1': {'value': val, 'options': [opt1, ...] }, ... }
    """
    # set time variables
    if os.path.isfile(GPF_CONSTANTS_FILENAME):
        modtime = int(os.path.getmtime(GPF_CONSTANTS_FILENAME))
        curtime = int(time.time())
    # if file found and it's been <= a week
    if (os.path.isfile(GPF_CONSTANTS_FILENAME)
            and curtime - modtime <= 7 * 24 * 60 * 60):
        # just read the dict from the cached file
        with open(GPF_CONSTANTS_FILENAME, 'r') as const_f:
            def_dict = json.load(const_f)
    # otherwise, we must regenerate the dict and rewrite it
    else:
        print('Regenerating GPFConstants file')
        html = utils.get_html(GPF_URL)
        doc = pq(html)
        def_dict = {}
        # start with input elements
        for inp in doc('form#play_finder input[name]'):
            name = inp.attrib['name']
            # add blank dict if not present
            if name not in def_dict:
                def_dict[name] = {
                    'value': set(),
                    'options': set(),
                    'type': inp.type
                }
            val = inp.attrib.get('value', '')
            # handle checkboxes and radio buttons
            if inp.type in ('checkbox', 'radio'):
                # deal with default value
                if 'checked' in inp.attrib:
                    def_dict[name]['value'].add(val)
                # add to options
                def_dict[name]['options'].add(val)
            # handle other types of inputs (only other type is hidden?)
            else:
                def_dict[name]['value'].add(val)
        # for dropdowns (select elements)
        for sel in doc.items('form#play_finder select[name]'):
            name = sel.attr['name']
            # add blank dict if not present
            if name not in def_dict:
                def_dict[name] = {
                    'value': set(),
                    'options': set(),
                    'type': 'select'
                }
            # deal with default value
            defaultOpt = sel('option[selected]')
            if len(defaultOpt):
                defaultOpt = defaultOpt[0]
                def_dict[name]['value'].add(defaultOpt.attrib.get('value', ''))
            else:
                def_dict[name]['value'].add(
                    sel('option')[0].attrib.get('value', '')
                )
            # deal with options
            def_dict[name]['options'] = {
                opt.attrib['value'] for opt in sel('option')
                if opt.attrib.get('value')
            }
        # ignore QB kneels by default
        def_dict['include_kneels']['value'] = ['0']
        def_dict.pop('request', None)
        def_dict.pop('use_favorites', None)
        with open(GPF_CONSTANTS_FILENAME, 'w+') as f:
            for k in def_dict:
                # sort numerically when every entry parses as an int,
                # otherwise fall back to lexicographic order (the old code
                # used a bare `except:`, which also swallowed
                # KeyboardInterrupt and unrelated errors)
                try:
                    def_dict[k]['value'] = sorted(
                        list(def_dict[k]['value']), key=int
                    )
                    def_dict[k]['options'] = sorted(
                        list(def_dict[k]['options']), key=int
                    )
                except ValueError:
                    def_dict[k]['value'] = sorted(list(def_dict[k]['value']))
                    def_dict[k]['options'] = sorted(
                        list(def_dict[k]['options'])
                    )
            json.dump(def_dict, f)
    return def_dict
def get(self):
    '''
    Demonstrates in-memory session storage by displaying, then deleting,
    the session value "sv".

    Please don't do this in production environments.
    '''
    self.write("Memory Session Object Demo:")
    if "sv" in self.session:
        # read the value once and reuse it (the old code assigned
        # current_value but then re-read self.session["sv"], leaving the
        # variable unused)
        current_value = self.session["sv"]
        self.write("current sv value is %s, and system will delete this value.<br/>" % current_value)
        self.session.delete("sv")
        if "sv" not in self.session:
            self.write("current sv value is empty")
    else:
        self.write("Session data not found")
def expand_details(df, detailCol='detail'):
    """Expands the details column of the given dataframe and returns the
    resulting DataFrame.

    :df: The input DataFrame.
    :detailCol: The detail column name.
    :returns: Returns DataFrame with new columns from pbp parsing.
    """
    df = copy.deepcopy(df)
    df['detail'] = df[detailCol]
    parsed = [sportsref.nfl.pbp.parse_play_details(detail)
              for detail in df['detail'].values]
    # details that failed to parse are represented as all-NaN rows
    all_cols = {col for d in parsed if d for col in d.keys()}
    blank_row = {col: np.nan for col in all_cols}
    filled = [d if d else blank_row for d in parsed]
    # merge the parsed details into the original to create the main DataFrame
    details = pd.DataFrame(filled)
    df = pd.merge(df, details, left_index=True, right_index=True)
    # flag the rows whose details could not be parsed
    df['isError'] = False
    error_idxs = [i for i, d in enumerate(parsed) if d is None]
    df.loc[error_idxs, 'isError'] = True
    # fill in some NaN's necessary for _clean_features
    df.loc[0, 'qtr_time_remain'] = '15:00'
    df.qtr_time_remain.fillna(method='bfill', inplace=True)
    df.qtr_time_remain.fillna(
        pd.Series(np.where(df.quarter == 4, '0:00', '15:00')), inplace=True
    )
    # use _clean_features to clean up and add columns
    return df.apply(_clean_features, axis=1)
def parse_play_details(details):
    """Parses play details from play-by-play string and returns structured
    data.

    The detail string is tried against a sequence of play-type regexes
    (kickoff, timeout, field goal, punt, kneel, spike, extra point,
    two-point conversion, pass, pre-snap penalty, and finally rush); the
    first regex that matches determines the play type, and its named
    groups populate the returned dict. Challenge and lateral flags are
    extracted up front for all play types.

    :details: detail string for play
    :returns: dictionary of play attributes, or None when the input is not
    a string or no play-type regex matches
    """
    # if input isn't a string, return None
    if not isinstance(details, basestring):
        return None
    # alternations of the known rush directions / pass locations; the
    # keys come from the module-level RUSH_OPTS / PASS_OPTS dicts
    rushOptRE = r'(?P<rushDir>{})'.format(
        r'|'.join(RUSH_OPTS.keys())
    )
    passOptRE = r'(?P<passLoc>{})'.format(
        r'|'.join(PASS_OPTS.keys())
    )
    # a PFR player ID: 6-8 non-space chars followed by two digits
    playerRE = r"\S{6,8}\d{2}"
    # initialize return dictionary - struct
    struct = {}
    # handle challenges
    # TODO: record the play both before & after an overturned challenge
    challengeRE = re.compile(
        r'.+\. (?P<challenger>.+?) challenged.*? the play was '
        '(?P<callUpheld>upheld|overturned)\.',
        re.IGNORECASE
    )
    match = challengeRE.search(details)
    if match:
        struct['isChallenge'] = True
        struct.update(match.groupdict())
        # if overturned, only record updated play
        if 'overturned' in details:
            overturnedIdx = details.index('overturned.')
            newStart = overturnedIdx + len('overturned.')
            details = details[newStart:].strip()
    else:
        struct['isChallenge'] = False
    # TODO: expand on laterals
    struct['isLateral'] = details.find('lateral') != -1
    # create rushing regex
    rusherRE = r"(?P<rusher>{0})".format(playerRE)
    rushOptRE = r"(?: {})?".format(rushOptRE)
    rushYardsRE = r"(?:(?:(?P<rushYds>\-?\d+) yards?)|(?:no gain))"
    # cases: tackle, fumble, td, penalty
    tackleRE = (r"(?: \(tackle by (?P<tackler1>{0})"
                r"(?: and (?P<tackler2>{0}))?\))?"
                .format(playerRE))
    # currently, plays with multiple fumbles record the original fumbler
    # and the final fumble recoverer
    fumbleRE = (
        r"(?:"
        r"\.? ?(?P<fumbler>{0}) fumbles"
        r"(?: \(forced by (?P<fumbForcer>{0})\))?"
        r"(?:.*, recovered by (?P<fumbRecoverer>{0}) at )?"
        r"(?:, ball out of bounds at )?"
        r"(?:(?P<fumbRecFieldSide>[a-z]+)?\-?(?P<fumbRecYdLine>\-?\d+))?"
        r"(?: and returned for (?P<fumbRetYds>\-?\d*) yards)?"
        r")?"
        .format(playerRE))
    tdSafetyRE = r"(?:(?P<isTD>, touchdown)|(?P<isSafety>, safety))?"
    # TODO: offsetting penalties
    penaltyRE = (r"(?:.*?"
                 r"\. Penalty on (?P<penOn>{0}|): "
                 r"(?P<penalty>[^\(,]+)"
                 r"(?: \((?P<penDeclined>Declined)\)|"
                 r", (?P<penYds>\d*) yards?)"
                 r"(?: \(no play\))?"
                 r")?"
                 .format(playerRE))
    rushREstr = (
        r"{}{}(?: for {}{}{}{}{})?"
    ).format(rusherRE, rushOptRE, rushYardsRE, tackleRE, fumbleRE, tdSafetyRE,
             penaltyRE)
    rushRE = re.compile(rushREstr, re.IGNORECASE)
    # create passing regex
    # TODO: capture "defended by X" for defensive stats
    passerRE = r"(?P<passer>{0})".format(playerRE)
    sackRE = (r"(?:sacked (?:by (?P<sacker1>{0})(?: and (?P<sacker2>{0}))? )?"
              r"for (?P<sackYds>\-?\d+) yards?)"
              .format(playerRE))
    # create throw RE
    completeRE = r"pass (?P<isComplete>(?:in)?complete)"
    passOptRE = r"(?: {})?".format(passOptRE)
    targetedRE = r"(?: (?:to |intended for )?(?P<target>{0}))?".format(
        playerRE)
    passYardsRE = r"(?: for (?:(?P<passYds>\-?\d+) yards?|no gain))"
    intRE = (r'(?: is intercepted by (?P<interceptor>{0}) at '.format(playerRE)
             + r'(?:(?P<intFieldSide>[a-z]*)?\-?(?P<intYdLine>\-?\d*))?'
             + r'(?: and returned for (?P<intRetYds>\-?\d+) yards?\.?)?)?')
    throwRE = r'(?:{}{}{}(?:(?:{}|{}){})?)'.format(
        completeRE, passOptRE, targetedRE, passYardsRE, intRE, tackleRE
    )
    passREstr = (
        r"{} (?:{}|{})(?:{}{}{})?"
    ).format(passerRE, sackRE, throwRE, fumbleRE, tdSafetyRE, penaltyRE)
    passRE = re.compile(passREstr, re.IGNORECASE)
    # create kickoff regex
    koKickerRE = r'(?P<koKicker>{0})'.format(playerRE)
    koYardsRE = (r' kicks (?:off|(?P<isOnside>onside))'
                 r' (?:(?P<koYds>\d+) yards?|no gain)')
    # optional post-kick outcomes (return, muff, onside recovery, OOB,
    # touchback); each may or may not appear
    nextREs = []
    nextREs.append(
        (r', (?:returned|recovered) by (?P<koReturner>{0})(?: for '
         r'(?:(?P<koRetYds>\-?\d+) yards?|no gain))?').format(playerRE)
    )
    nextREs.append(
        (r'(?P<isMuffedCatch>, muffed catch by )(?P<muffedBy>{0}),'
         r'(?: recovered by (?P<muffRecoverer>{0}))?').format(playerRE) +
        r'(?: and returned for (?:(?P<muffRetYds>\-?\d+) yards|no gain))?'
    )
    nextREs.append(
        r', recovered by (?P<onsideRecoverer>{0})'.format(playerRE)
    )
    nextREs.append(r'(?P<oob>, out of bounds)')
    nextREs.append(r'(?P<isTouchback>, touchback)')
    # TODO: test the following line to fix a small subset of cases
    # (ex: muff -> oob)
    nextRE = ''.join(r'(?:{})?'.format(nre) for nre in nextREs)
    kickoffREstr = r'{}{}{}{}{}{}{}'.format(
        koKickerRE, koYardsRE, nextRE,
        tackleRE, fumbleRE, tdSafetyRE, penaltyRE
    )
    kickoffRE = re.compile(kickoffREstr, re.IGNORECASE)
    # create timeout regex
    timeoutREstr = r'Timeout #(?P<timeoutNum>\d) by (?P<timeoutTeam>.+)'
    timeoutRE = re.compile(timeoutREstr, re.IGNORECASE)
    # create FG regex
    fgKickerRE = r'(?P<fgKicker>{0})'.format(playerRE)
    fgBaseRE = (r' (?P<fgDist>\d+) yard field goal'
                r' (?P<fgGood>good|no good)')
    fgBlockRE = (
        r'(?:, (?P<isBlocked>blocked) by '
        r'(?P<fgBlocker>{0}))?'.format(playerRE) +
        r'(?:, recovered by (?P<fgBlockRecoverer>{0}))?'.format(playerRE) +
        r'(?: and returned for (?:(?P<fgBlockRetYds>\-?\d+) yards?|no gain))?'
    )
    fgREstr = r'{}{}{}{}{}'.format(fgKickerRE, fgBaseRE,
                                   fgBlockRE, tdSafetyRE, penaltyRE)
    fgRE = re.compile(fgREstr, re.IGNORECASE)
    # create punt regex
    punterRE = r'.*?(?P<punter>{0})'.format(playerRE)
    puntBlockRE = (
        (r' punts, (?P<isBlocked>blocked) by (?P<puntBlocker>{0})'
         r'(?:, recovered by (?P<puntBlockRecoverer>{0})').format(playerRE) +
        r'(?: and returned (?:(?P<puntBlockRetYds>\-?\d+) yards|no gain))?)?'
    )
    puntYdsRE = r' punts (?P<puntYds>\d+) yards?'
    # optional post-punt outcomes (fair catch, OOB, muff, return); these
    # alternatives are mutually exclusive, unlike the kickoff ones above
    nextREs = []
    nextREs.append(r', (?P<isFairCatch>fair catch) by (?P<fairCatcher>{0})'
                   .format(playerRE))
    nextREs.append(r', (?P<oob>out of bounds)')
    nextREs.append(
        (r'(?P<isMuffedCatch>, muffed catch by )(?P<muffedBy>{0}),'
         r' recovered by (?P<muffRecoverer>{0})').format(playerRE) +
        r' and returned for ' +
        r'(?:(?P<muffRetYds>\d+) yards|no gain)'
    )
    nextREs.append(
        r', returned by (?P<puntReturner>{0}) for '.format(playerRE) +
        r'(?:(?P<puntRetYds>\-?\d+) yards?|no gain)'
    )
    nextRE = r'(?:{})?'.format('|'.join(nextREs))
    puntREstr = r'{}(?:{}|{}){}{}{}{}{}'.format(
        punterRE, puntBlockRE, puntYdsRE, nextRE,
        tackleRE, fumbleRE, tdSafetyRE, penaltyRE
    )
    puntRE = re.compile(puntREstr, re.IGNORECASE)
    # create kneel regex
    kneelREstr = (r'(?P<kneelQB>{0}) kneels for '.format(playerRE) +
                  r'(?:(?P<kneelYds>\-?\d+) yards?|no gain)')
    kneelRE = re.compile(kneelREstr, re.IGNORECASE)
    # create spike regex
    spikeREstr = r'(?P<spikeQB>{0}) spiked the ball'.format(playerRE)
    spikeRE = re.compile(spikeREstr, re.IGNORECASE)
    # create XP regex
    extraPointREstr = (r'(?:(?P<xpKicker>{0}) kicks)? ?extra point '
                       r'(?P<xpGood>good|no good)').format(playerRE)
    extraPointRE = re.compile(extraPointREstr, re.IGNORECASE)
    # create 2pt conversion regex
    twoPointREstr = (
        r'Two Point Attempt: (?P<twoPoint>.*?),?\s+conversion\s+'
        r'(?P<twoPointSuccess>succeeds|fails)'
    )
    twoPointRE = re.compile(twoPointREstr, re.IGNORECASE)
    # create penalty regex
    psPenaltyREstr = (
        r'^Penalty on (?P<penOn>{0}|'.format(playerRE) + r'\w{3}): ' +
        r'(?P<penalty>[^\(,]+)(?: \((?P<penDeclined>Declined)\)|' +
        r', (?P<penYds>\d*) yards?|' +
        r'.*?(?: \(no play\)))')
    psPenaltyRE = re.compile(psPenaltyREstr, re.IGNORECASE)
    # Try each play-type regex in turn; the first match wins.  The more
    # specific patterns come first; rushes are tried last, presumably
    # because the rush pattern starts with a bare player ID and would
    # otherwise shadow other play types — confirm before reordering.
    # try parsing as a kickoff
    match = kickoffRE.search(details)
    if match:
        # parse as a kickoff
        struct['isKickoff'] = True
        struct.update(match.groupdict())
        return struct
    # try parsing as a timeout
    match = timeoutRE.search(details)
    if match:
        # parse as timeout
        struct['isTimeout'] = True
        struct.update(match.groupdict())
        return struct
    # try parsing as a field goal
    match = fgRE.search(details)
    if match:
        # parse as a field goal
        struct['isFieldGoal'] = True
        struct.update(match.groupdict())
        return struct
    # try parsing as a punt
    match = puntRE.search(details)
    if match:
        # parse as a punt
        struct['isPunt'] = True
        struct.update(match.groupdict())
        return struct
    # try parsing as a kneel
    match = kneelRE.search(details)
    if match:
        # parse as a kneel
        struct['isKneel'] = True
        struct.update(match.groupdict())
        return struct
    # try parsing as a spike
    match = spikeRE.search(details)
    if match:
        # parse as a spike
        struct['isSpike'] = True
        struct.update(match.groupdict())
        return struct
    # try parsing as an XP
    match = extraPointRE.search(details)
    if match:
        # parse as an XP
        struct['isXP'] = True
        struct.update(match.groupdict())
        return struct
    # try parsing as a 2-point conversion
    match = twoPointRE.search(details)
    if match:
        # parse as a 2-point conversion; the embedded play text is parsed
        # recursively and merged into the same struct
        struct['isTwoPoint'] = True
        struct['twoPointSuccess'] = match.group('twoPointSuccess')
        realPlay = sportsref.nfl.pbp.parse_play_details(
            match.group('twoPoint'))
        if realPlay:
            struct.update(realPlay)
        return struct
    # try parsing as a pass
    match = passRE.search(details)
    if match:
        # parse as a pass
        struct['isPass'] = True
        struct.update(match.groupdict())
        return struct
    # try parsing as a pre-snap penalty
    match = psPenaltyRE.search(details)
    if match:
        # parse as a pre-snap penalty
        struct['isPresnapPenalty'] = True
        struct.update(match.groupdict())
        return struct
    # try parsing as a run
    match = rushRE.search(details)
    if match:
        # parse as a run
        struct['isRun'] = True
        struct.update(match.groupdict())
        return struct
    return None
def _clean_features(struct):
    """Cleans up the features collected in parse_play_details.

    :struct: Pandas Series of features parsed from details string.
    :returns: a pd.Series with the same features cleaned up: play-type
    flags and other booleans converted to real bools, numeric fields
    coerced to int/float (NaN on failure), string fields normalized, and
    helper columns (fieldSide/ydLine, secsElapsed, isInt, isFumble,
    isPenalty, team_epa, opp_epa) added.
    """
    struct = dict(struct)
    # First, clean up play type bools (missing/NaN -> False)
    ptypes = ['isKickoff', 'isTimeout', 'isFieldGoal', 'isPunt', 'isKneel',
              'isSpike', 'isXP', 'isTwoPoint', 'isPresnapPenalty', 'isPass',
              'isRun']
    for pt in ptypes:
        struct[pt] = struct[pt] if pd.notnull(struct.get(pt)) else False
    # Second, clean up other existing variables on a one-off basis,
    # converting the raw captured substrings into booleans
    struct['callUpheld'] = struct.get('callUpheld') == 'upheld'
    struct['fgGood'] = struct.get('fgGood') == 'good'
    struct['isBlocked'] = struct.get('isBlocked') == 'blocked'
    struct['isComplete'] = struct.get('isComplete') == 'complete'
    struct['isFairCatch'] = struct.get('isFairCatch') == 'fair catch'
    struct['isMuffedCatch'] = pd.notnull(struct.get('isMuffedCatch'))
    struct['isNoPlay'] = (
        ' (no play)' in struct['detail'] and
        'penalty enforced in end zone' not in struct['detail']
        if struct.get('detail') else False)
    struct['isOnside'] = struct.get('isOnside') == 'onside'
    struct['isSack'] = pd.notnull(struct.get('sackYds'))
    struct['isSafety'] = (struct.get('isSafety') == ', safety' or
                          (struct.get('detail') and
                           'enforced in end zone, safety' in struct['detail']))
    struct['isTD'] = struct.get('isTD') == ', touchdown'
    struct['isTouchback'] = struct.get('isTouchback') == ', touchback'
    struct['oob'] = pd.notnull(struct.get('oob'))
    struct['passLoc'] = PASS_OPTS.get(struct.get('passLoc'), np.nan)
    # pass/rush yardage defaults to 0 when the play matched but no
    # yardage was captured (e.g. "no gain")
    if struct['isPass']:
        pyds = struct['passYds']
        struct['passYds'] = pyds if pd.notnull(pyds) else 0
    if pd.notnull(struct['penalty']):
        struct['penalty'] = struct['penalty'].strip()
    struct['penDeclined'] = struct.get('penDeclined') == 'Declined'
    if struct['quarter'] == 'OT':
        struct['quarter'] = 5
    struct['rushDir'] = RUSH_OPTS.get(struct.get('rushDir'), np.nan)
    if struct['isRun']:
        ryds = struct['rushYds']
        struct['rushYds'] = ryds if pd.notnull(ryds) else 0
    year = struct.get('season', np.nan)
    struct['timeoutTeam'] = sportsref.nfl.teams.team_ids(year).get(
        struct.get('timeoutTeam'), np.nan
    )
    struct['twoPointSuccess'] = struct.get('twoPointSuccess') == 'succeeds'
    struct['xpGood'] = struct.get('xpGood') == 'good'
    # Third, ensure types are correct
    bool_vars = [
        'fgGood', 'isBlocked', 'isChallenge', 'isComplete', 'isFairCatch',
        'isFieldGoal', 'isKickoff', 'isKneel', 'isLateral', 'isNoPlay',
        'isPass', 'isPresnapPenalty', 'isPunt', 'isRun', 'isSack', 'isSafety',
        'isSpike', 'isTD', 'isTimeout', 'isTouchback', 'isTwoPoint', 'isXP',
        'isMuffedCatch', 'oob', 'penDeclined', 'twoPointSuccess', 'xpGood'
    ]
    int_vars = [
        'down', 'fgBlockRetYds', 'fgDist', 'fumbRecYdLine', 'fumbRetYds',
        'intRetYds', 'intYdLine', 'koRetYds', 'koYds', 'muffRetYds',
        'pbp_score_aw', 'pbp_score_hm', 'passYds', 'penYds', 'puntBlockRetYds',
        'puntRetYds', 'puntYds', 'quarter', 'rushYds', 'sackYds', 'timeoutNum',
        'ydLine', 'yds_to_go'
    ]
    float_vars = [
        'exp_pts_after', 'exp_pts_before', 'home_wp'
    ]
    string_vars = [
        'challenger', 'detail', 'fairCatcher', 'fgBlockRecoverer',
        'fgBlocker', 'fgKicker', 'fieldSide', 'fumbForcer',
        'fumbRecFieldSide', 'fumbRecoverer', 'fumbler', 'intFieldSide',
        'interceptor', 'kneelQB', 'koKicker', 'koReturner', 'muffRecoverer',
        'muffedBy', 'passLoc', 'passer', 'penOn', 'penalty',
        'puntBlockRecoverer', 'puntBlocker', 'puntReturner', 'punter',
        'qtr_time_remain', 'rushDir', 'rusher', 'sacker1', 'sacker2',
        'spikeQB', 'tackler1', 'tackler2', 'target', 'timeoutTeam',
        'xpKicker'
    ]
    for var in bool_vars:
        struct[var] = struct.get(var) is True
    for var in int_vars:
        try:
            struct[var] = int(struct.get(var))
        except (ValueError, TypeError):
            struct[var] = np.nan
    for var in float_vars:
        try:
            struct[var] = float(struct.get(var))
        except (ValueError, TypeError):
            struct[var] = np.nan
    for var in string_vars:
        # normalize missing, null, and empty-string values to NaN.
        # Fix: previously this compared the variable *name* (`var`) to '',
        # which is never true, so empty-string values slipped through.
        if var not in struct or pd.isnull(struct[var]) or struct[var] == '':
            struct[var] = np.nan
    # Fourth, create new helper variables based on parsed variables
    # creating fieldSide and ydline from location
    if struct['isXP']:
        struct['fieldSide'] = struct['ydLine'] = np.nan
    else:
        fieldSide, ydline = _loc_to_features(struct.get('location'))
        struct['fieldSide'] = fieldSide
        struct['ydLine'] = ydline
    # creating secsElapsed (in entire game) from qtr_time_remain and quarter
    if pd.notnull(struct.get('qtr_time_remain')):
        qtr = struct['quarter']
        mins, secs = map(int, struct['qtr_time_remain'].split(':'))
        struct['secsElapsed'] = qtr * 900 - mins * 60 - secs
    # creating columns for turnovers
    struct['isInt'] = pd.notnull(struct.get('interceptor'))
    struct['isFumble'] = pd.notnull(struct.get('fumbler'))
    # create column for isPenalty
    struct['isPenalty'] = pd.notnull(struct.get('penalty'))
    # create columns for EPA (expected points added, from the perspective
    # of the team with possession and its opponent)
    struct['team_epa'] = struct['exp_pts_after'] - struct['exp_pts_before']
    struct['opp_epa'] = struct['exp_pts_before'] - struct['exp_pts_after']
    return pd.Series(struct)
def _loc_to_features(loc):
"""Converts a location string "{Half}, {YardLine}" into a tuple of those
values, the second being an int.
:l: The string from the play by play table representing location.
:returns: A tuple that separates out the values, making them missing
(np.nan) when necessary.
"""
if loc:
if isinstance(loc, basestring):
loc = loc.strip()
if ' ' in loc:
r = loc.split()
r[0] = r[0].lower()
r[1] = int(r[1])
else:
r = (np.nan, int(loc))
elif isinstance(loc, float):
return (np.nan, 50)
else:
r = (np.nan, np.nan)
return r |
def _add_team_columns(features):
    """Adds 'team' and 'opp' columns to the play features by walking the
    rows in order.

    Preconditions: all rows come from the same game and appear in
    continuous game order.

    :features: A DataFrame with one row per play (in order).
    :returns: A similar DataFrame with 'team' and 'opp' columns added.
    """
    rows = features.to_dict('records')
    team = opp = None
    prev_was_kickoff = False
    for play in rows:
        # On a kickoff (and on the play right after one) possession is
        # re-derived from scratch; otherwise it carries over from the
        # previous play.
        if play['isKickoff'] or prev_was_kickoff:
            team, opp = _team_and_opp(play)
        else:
            team, opp = _team_and_opp(play, team, opp)
        play['team'], play['opp'] = team, opp
        prev_was_kickoff = play['isKickoff']
    result = pd.DataFrame(rows)
    # Fill any remaining gaps from neighboring plays: backfill first,
    # then forward-fill so the final row(s) are covered too.
    for col in ('team', 'opp'):
        result[col].fillna(method='bfill', inplace=True)
        result[col].fillna(method='ffill', inplace=True)
    return result
def _team_and_opp(struct, curTm=None, curOpp=None):
    """Given a dict representing a play and the current team with the ball,
    returns (team, opp) where team is the team with the ball and opp is the
    team without the ball at the end of the play.

    :struct: A Series/dict representing the play.
    :curTm: The current team with the ball; None means it's the first play of
    the game or the offensive team on the previous play's offensive team was
    somehow undetermined.
    :curOpp: The current team on defense; None means same as curTm.
    :returns: (team, opp) tuple where team and opp are the 3-character team IDs
    or the offensive and defensive teams respectively.
    """
    # if we don't know the current team, figure it out from the ID of the
    # player executing the play (rusher, passer, kicker, etc.)
    if pd.isnull(curTm):
        if struct['isRun']:
            pID = struct['rusher']
        elif struct['isPass']:
            pID = struct['passer']
        elif struct['isFieldGoal']:
            pID = struct['fgKicker']
        elif struct['isPunt']:
            pID = struct['punter']
        elif struct['isXP']:
            pID = struct['xpKicker']
        elif struct['isKickoff']:
            pID = struct['koKicker']
        elif struct['isSpike']:
            pID = struct['spikeQB']
        elif struct['isKneel']:
            pID = struct['kneelQB']
        else:
            # no identifiable actor on this play (e.g. timeout/penalty)
            pID = None
        curTm = curOpp = np.nan
        bs = sportsref.nfl.boxscores.BoxScore(struct['boxscore_id'])
        if pID and len(pID) == 3:
            # a 3-character "player" ID is treated as a team ID directly —
            # presumably how the site encodes team-level actors; confirm
            curTm = pID
            curOpp = bs.away() if bs.home() == curTm else bs.home()
        elif pID:
            # look up which team the player played for in this boxscore
            # via his career gamelog (regular season + playoffs)
            player = sportsref.nfl.Player(pID)
            gamelog = player.gamelog(kind='B')
            curTm = gamelog.loc[
                gamelog.boxscore_id == struct['boxscore_id'], 'team_id'
            ].item()
            curOpp = bs.home() if bs.home() != curTm else bs.away()
        return curTm, curOpp
    # use row's class to determine when possession changes
    # NOTE(review): 'has_class_divider' appears to mark rows where
    # possession flips between teams — confirm against the scraper.
    if struct['has_class_divider']:
        return curOpp, curTm
    else:
        return curTm, curOpp
def _add_team_features(df):
    """Adds convenience features based on the teams with and without
    possession. Precondition: 'team' and 'opp' columns already exist
    (see _add_team_columns) and all rows come from one game.

    :df: A game's play-by-play DataFrame after _clean_features and
    _add_team_columns have run.
    :returns: The DataFrame with the new offense/defense-relative columns.
    """
    assert df.team.notnull().all()
    team_is_home = df['team'] == df['home']
    # Distance to the goal line depends on which side of the field the
    # offense is on; XPs and 2-point tries are assigned a distance of 2.
    on_own_side = df['team'] != df['fieldSide']
    df['distToGoal'] = np.where(on_own_side, df['ydLine'],
                                100 - df['ydLine'])
    df['distToGoal'] = np.where(df['isXP'] | df['isTwoPoint'],
                                2, df['distToGoal'])
    # Win probability from the offense's and defense's perspectives.
    df['team_wp'] = np.where(team_is_home, df['home_wp'],
                             100. - df['home_wp'])
    df['opp_wp'] = 100. - df['team_wp']
    # Win probability added for each side.
    df['team_wpa'] = np.where(team_is_home, df['home_wpa'],
                              -df['home_wpa'])
    df['opp_wpa'] = -df['team_wpa']
    # Map home/away scores onto offense/defense scores via the boxscore.
    assert df['boxscore_id'].nunique() == 1
    box = sportsref.nfl.boxscores.BoxScore(df['boxscore_id'].values[0])
    offense_is_home = df['team'] == box.home()
    df['team_score'] = np.where(offense_is_home,
                                df['pbp_score_hm'], df['pbp_score_aw'])
    df['opp_score'] = np.where(offense_is_home,
                               df['pbp_score_aw'], df['pbp_score_hm'])
    return df
def _get_player_stats_table(self, subpage, table_id):
    """Helper for fetching a player season-stats table.

    :subpage: Name of the player subpage to fetch (e.g. 'passing').
    :table_id: HTML id of the stats table on that subpage.
    :returns: A DataFrame of stats.
    """
    document = self.get_sub_doc(subpage)
    stats_table = document('table#{}'.format(table_id))
    return sportsref.utils.parse_table(stats_table)
def initialWinProb(line):
    """Gets the initial win probability of a game given its Vegas line.

    :line: The Vegas line from the home team's perspective (negative means
    the home team is favored).
    :returns: A float in [0., 100.] representing the home team's win
    probability.
    """
    spread = float(line)
    # Final margin is modeled as Normal(-line, 13.86) — the 13.86 scale is
    # presumably a historical margin-of-victory std dev; TODO confirm.
    mean, std = -spread, 13.86
    p_tie = norm.cdf(0.5, mean, std) - norm.cdf(-0.5, mean, std)
    p_win = 1. - norm.cdf(0.5, mean, std)
    # a tie counts as half a win
    return 100. * (p_win + 0.5 * p_tie)
def gamelog(self, year=None, kind='R'):
    """Gets the career gamelog of the given player.

    :kind: One of 'R', 'P', or 'B' (for regular season, playoffs, or both).
    Case-insensitive; defaults to 'R'.
    :year: The year for which the gamelog should be returned; if None,
    return entire career gamelog. Defaults to None.
    :returns: A DataFrame with the player's career gamelog.
    """
    # normalize case so the documented case-insensitivity actually holds
    kind = kind.upper()
    # NOTE(review): kind == 'B' currently falls through to the playoff
    # table; returning both tables would require concatenating them —
    # confirm intended behavior.
    url = self._subpage_url('gamelog', None)  # year is filtered later
    doc = pq(sportsref.utils.get_html(url))
    table = (doc('table#stats') if kind == 'R' else
             doc('table#stats_playoffs'))
    df = sportsref.utils.parse_table(table)
    if year is not None:
        df = df.query('year == @year').reset_index(drop=True)
    return df
def passing(self, kind='R'):
    """Gets yearly passing stats for the player.

    :kind: One of 'R', 'P', or 'B'. Case-insensitive; defaults to 'R'.
    :returns: Pandas DataFrame with passing stats.
    """
    # normalize case so the documented case-insensitivity actually holds
    kind = kind.upper()
    doc = self.get_doc()
    table = (doc('table#passing') if kind == 'R' else
             doc('table#passing_playoffs'))
    df = sportsref.utils.parse_table(table)
    return df
def rushing_and_receiving(self, kind='R'):
    """Gets yearly rushing/receiving stats for the player.

    :kind: One of 'R', 'P', or 'B'. Case-insensitive; defaults to 'R'.
    :returns: Pandas DataFrame with rushing/receiving stats.
    """
    # normalize case so the documented case-insensitivity actually holds
    kind = kind.upper()
    doc = self.get_doc()
    table = (doc('table#rushing_and_receiving') if kind == 'R'
             else doc('table#rushing_and_receiving_playoffs'))
    # receivers' pages use the reversed table id; fall back to it
    if not table:
        table = (doc('table#receiving_and_rushing') if kind == 'R'
                 else doc('table#receiving_and_rushing_playoffs'))
    df = sportsref.utils.parse_table(table)
    return df
def _plays(self, year, play_type, expand_details):
    """Returns a DataFrame of plays for a given year for a given play type
    (like rushing, receiving, or passing).

    :year: The year for the season.
    :play_type: A type of play for which there are plays (as of this
    writing, either "passing", "rushing", or "receiving").
    :expand_details: Bool for whether the PBP description strings should be
    parsed into structured columns.
    :returns: A DataFrame with one row per play, or None if there were no
    such plays in that year.
    """
    url = self._subpage_url('{}-plays'.format(play_type), year)
    doc = pq(sportsref.utils.get_html(url))
    plays_table = doc('table#all_plays')
    if not plays_table:
        return None
    raw = sportsref.utils.parse_table(plays_table)
    if not expand_details:
        return raw
    # parse each raw 'description' string into structured pbp columns
    return sportsref.nfl.pbp.expand_details(raw, detailCol='description')
def advanced_splits(self, year=None):
    """Returns a DataFrame of advanced splits data for a player-year.
    Note: the data only goes back to 2012.

    :year: The year for the season in question. If None, returns career
    advanced splits.
    :returns: A DataFrame of advanced splits data.
    """
    url = self._subpage_url('splits', year)
    doc = pq(sportsref.utils.get_html(url))
    df = sportsref.utils.parse_table(doc('table#advanced_splits'))
    # carry split_type labels forward to the rows where they are missing
    if not df.empty:
        df.split_type.fillna(method='ffill', inplace=True)
    return df
def _simple_year_award(self, award_id):
    """Template for simple award functions that just list years, such as
    pro bowls and first-team all-pro selections.

    :award_id: The div ID appended to "leaderboard_" when selecting the
    award table's div.
    :returns: List of years (as ints) in which the player won the award.
    """
    table = self.get_doc()('div#leaderboard_{} table'.format(award_id))
    year_strings = sportsref.utils.parse_awards_table(table)
    return [int(y) for y in year_strings]
def team_names(year):
    """Returns a mapping from team ID to full team name for a given season.
    Example of a full team name: "New England Patriots"

    :year: The year of the season in question (as an int).
    :returns: A dictionary with teamID keys and full team name values.
    """
    # parse both the active and the inactive franchise tables from /teams/
    doc = pq(sportsref.utils.get_html(sportsref.nfl.BASE_URL + '/teams/'))
    active_table = doc('table#teams_active')
    active_df = sportsref.utils.parse_table(active_table)
    inactive_table = doc('table#teams_inactive')
    inactive_df = sportsref.utils.parse_table(inactive_table)
    df = pd.concat((active_df, inactive_df))
    # drop rows flagged with the 'partial_table' class — presumably the
    # indented sub-rows for historical franchise names; confirm
    df = df.loc[~df['has_class_partial_table']]
    # team IDs are the first 3 characters of the parsed team_id values
    ids = df.team_id.str[:3].values
    # the display names come from the anchor text in each table's th cells
    names = [tr('th a') for tr in active_table('tr').items()]
    names.extend(tr('th a') for tr in inactive_table('tr').items())
    names = [_f for _f in names if _f]
    names = [lst[0].text_content() for lst in names]
    # combine IDs and team names into pandas series
    series = pd.Series(names, index=ids)
    # create a mask to filter to teams from the given year
    mask = ((df.year_min <= year) & (year <= df.year_max)).values
    # filter, convert to a dict, and return
    return series[mask].to_dict()
def team_ids(year):
    """Returns a mapping from full team name to team ID for a given season.
    This is the inverse mapping of team_names. Example of a full team name:
    "New England Patriots"

    :year: The year of the season in question (as an int).
    :returns: A dictionary with full team name keys and teamID values.
    """
    id_to_name = team_names(year)
    inverted = {}
    for team_id, full_name in id_to_name.items():
        inverted[full_name] = team_id
    return inverted
def name(self):
    """Returns the real name of the franchise given the team ID.

    Examples:
        'nwe' -> 'New England Patriots'
        'sea' -> 'Seattle Seahawks'

    :returns: A string corresponding to the team's full name.
    """
    doc = self.get_main_doc()
    header = doc('div#meta h1')[0].text_content()
    words = header.split()
    # the page header reads "<Team Name> Franchise ..."; keep everything
    # before the word 'Franchise'
    return ' '.join(words[:words.index('Franchise')])
def roster(self, year):
    """Returns the roster table for the given year.

    :year: The year for which we want the roster.
    :returns: A DataFrame containing roster information for that year;
    when starter data is available, 'is_starter' and 'starting_pos'
    columns are added.
    """
    doc = self.get_year_doc('{}_roster'.format(year))
    roster_table = doc('table#games_played_team')
    df = sportsref.utils.parse_table(roster_table)
    starter_table = doc('table#starters')
    # Fix: PyQuery objects have no `.empty` attribute — the previous
    # `not starter_table.empty` tested a bound method (always truthy), so
    # the starter columns were never populated. Use PyQuery truthiness
    # (non-empty selection), as done elsewhere in this module.
    if starter_table:
        start_df = sportsref.utils.parse_table(starter_table)
        start_df = start_df.dropna(axis=0, subset=['position'])
        starters = start_df.set_index('position').player_id
        df['is_starter'] = df.player_id.isin(starters)
        # map each starter back to the position he starts at
        df['starting_pos'] = df.player_id.map(
            lambda pid: (starters[starters == pid].index[0]
                         if pid in starters.values else None)
        )
    return df
def boxscores(self, year):
    """Gets the boxscore IDs for the games the team played that year.

    :year: The year for which we want the boxscores.
    :returns: np.array of strings representing boxscore IDs.
    """
    games_table = self.get_year_doc(year)('table#games')
    games = sportsref.utils.parse_table(games_table)
    if games.empty:
        return np.array([])
    return games.boxscore_id.values
def _year_info_pq(self, year, keyword):
    """Returns a PyQuery object for the p tag in the team-year meta div
    whose text contains the given keyword.

    :year: Int representing the season.
    :keyword: Keyword used to select a single p tag (case-insensitive
    substring match).
    :returns: A PyQuery object for the first matching p element.
    :raises ValueError: when no p tag matches, or no p tags exist at all.
    """
    doc = self.get_year_doc(year)
    p_tags = doc('div#meta div:not(.logo) p')
    needle = keyword.lower()
    for p_tag in p_tags:
        if needle in p_tag.text_content().strip().lower():
            return pq(p_tag)
    # distinguish "keyword absent" from "meta div missing entirely"
    if len(p_tags):
        raise ValueError('Keyword not found in any p tag.')
    raise ValueError('No meta div p tags found.')
def head_coaches_by_game(self, year):
    """Returns head coach data by game.

    :year: An int representing the season in question.
    :returns: An array with an entry per game of the season that the team
    played (including playoffs). Each entry is the head coach's ID for that
    game in the season.
    """
    coach_str = self._year_info_pq(year, 'Coach').text()
    regex = r'(\S+?) \((\d+)-(\d+)-(\d+)\)'
    coachAndTenure = []
    # Fix: the original referenced an undefined `coachStr` and called
    # m.groups() even after re.search returned None on loop exit.
    while True:
        m = re.search(regex, coach_str)
        if not m:
            break  # no more "coachID (W-L-T)" entries to consume
        coachID, wins, losses, ties = m.groups()
        # consume this entry, then keep scanning the remainder
        coach_str = coach_str[m.end(4) + 1:]
        # a coach's tenure in games is his W + L + T for the season
        tenure = int(wins) + int(losses) + int(ties)
        coachAndTenure.append((coachID, tenure))
    # expand each coach to one entry per game; the reversal puts the
    # games in chronological order (the page appears to list the most
    # recent coach first — TODO confirm)
    coachIDs = [
        cID for cID, games in coachAndTenure for _ in range(games)
    ]
    return np.array(coachIDs[::-1])
def wins(self, year):
    """Returns the # of regular season wins a team had in a year.

    :year: The year for the season in question.
    :returns: The number of regular season wins, or np.nan if no schedule
    data is available.
    """
    sched = self.schedule(year)
    if sched.empty:
        return np.nan
    # regular season = weeks <= 17 here (pre-2021 scheduling) — TODO
    # confirm this holds for 17-game seasons
    regular_season = sched.query('week_num <= 17')
    return regular_season.is_win.sum()
def schedule(self, year):
    """Returns a DataFrame with schedule information for the given year.

    :year: The year for the season in question.
    :returns: Pandas DataFrame with schedule information; an empty
    DataFrame when no games table is found.
    """
    doc = self.get_year_doc(year)
    games = sportsref.utils.parse_table(doc('table#games'))
    if games.empty:
        return pd.DataFrame()
    # keep only rows that carry a week number, then renumber sequentially
    games = games.loc[games['week_num'].notnull()]
    games['week_num'] = np.arange(len(games)) + 1
    # derive boolean outcome flags from game_outcome / overtime columns
    outcome = games['game_outcome']
    games['is_win'] = outcome == 'W'
    games['is_loss'] = outcome == 'L'
    games['is_tie'] = outcome == 'T'
    games['is_bye'] = outcome.isnull()
    games['is_ot'] = games['overtime'].notnull()
    return games
def srs(self, year):
    """Returns the SRS (Simple Rating System) for a team in a year.

    :year: The year for the season in question.
    :returns: A float of SRS, or None if unavailable.
    """
    try:
        text = self._year_info_pq(year, 'SRS').text()
    except ValueError:
        # no SRS entry in the meta div for this year
        return None
    m = re.match(r'SRS\s*?:\s*?(\S+)', text)
    return float(m.group(1)) if m else None
def sos(self, year):
    """Returns the SOS (Strength of Schedule) for a team in a year, based
    on SRS.

    :year: The year for the season in question.
    :returns: A float of SOS, or None if unavailable.
    """
    try:
        text = self._year_info_pq(year, 'SOS').text()
    except ValueError:
        # no SOS entry in the meta div for this year
        return None
    m = re.search(r'SOS\s*:\s*(\S+)', text)
    return float(m.group(1)) if m else None
def off_coordinator(self, year):
    """Returns the coach ID for the team's OC in a given year.

    :year: An int representing the year.
    :returns: A string containing the coach ID of the OC, or None when
    unavailable.
    """
    try:
        oc_anchor = self._year_info_pq(year, 'Offensive Coordinator')('a')
        if oc_anchor:
            # convert the anchor's relative URL into a coach ID (as
            # stadium() does); the raw href contradicted the documented
            # return value
            return sportsref.utils.rel_url_to_id(oc_anchor.attr['href'])
    except ValueError:
        return None
def def_coordinator(self, year):
    """Returns the coach ID for the team's DC in a given year.

    :year: An int representing the year.
    :returns: A string containing the coach ID of the DC, or None when
    unavailable.
    """
    try:
        dc_anchor = self._year_info_pq(year, 'Defensive Coordinator')('a')
        if dc_anchor:
            # convert the anchor's relative URL into a coach ID (as
            # stadium() does); the raw href contradicted the documented
            # return value
            return sportsref.utils.rel_url_to_id(dc_anchor.attr['href'])
    except ValueError:
        return None
def stadium(self, year):
    """Returns the ID for the stadium in which the team played in a given
    year.

    :year: The year in question.
    :returns: A string representing the stadium ID.
    """
    stadium_anchor = self._year_info_pq(year, 'Stadium')('a')
    href = stadium_anchor.attr['href']
    return sportsref.utils.rel_url_to_id(href)
def off_scheme(self, year):
    """Returns the name of the offensive scheme the team ran in the given
    year.

    :year: Int representing the season year.
    :returns: A string representing the offensive scheme, or None if not
    listed.
    """
    text = self._year_info_pq(year, 'Offensive Scheme').text()
    m = re.search(r'Offensive Scheme[:\s]*(.+)\s*', text, re.I)
    return m.group(1) if m else None
def def_alignment(self, year):
    """Returns the name of the defensive alignment the team ran in the
    given year.

    :year: Int representing the season year.
    :returns: A string representing the defensive alignment, or None if
    not listed.
    """
    text = self._year_info_pq(year, 'Defensive Alignment').text()
    m = re.search(r'Defensive Alignment[:\s]*(.+)\s*', text, re.I)
    return m.group(1) if m else None
def team_stats(self, year):
    """Returns a Series (dict-like) of team stats from the team-season
    page.

    :year: Int representing the season.
    :returns: A Series of team stats; an empty Series when the table is
    missing.
    """
    doc = self.get_year_doc(year)
    stats = sportsref.utils.parse_table(doc('table#team_stats'))
    if stats.empty:
        return pd.Series()
    # the table holds both team and opponent rows; select the team's row
    return stats.loc[stats.player_id == 'Team Stats'].iloc[0]
def opp_stats(self, year):
    """Returns a Series (dict-like) of the team's opponent's stats from the
    team-season page.

    :year: Int representing the season.
    :returns: A Series of opponent stats; an empty Series when the table
    is missing.
    """
    doc = self.get_year_doc(year)
    table = doc('table#team_stats')
    df = sportsref.utils.parse_table(table)
    # guard against a missing/empty table, mirroring team_stats();
    # .iloc[0] on an empty frame would raise IndexError
    if df.empty:
        return pd.Series()
    return df.loc[df.player_id == 'Opp. Stats'].iloc[0]
def off_splits(self, year):
    """Returns a DataFrame of offensive team splits for a season.

    :year: int representing the season.
    :returns: Pandas DataFrame of split data; an empty DataFrame when no
    split tables are found.
    """
    doc = self.get_year_doc('{}_splits'.format(year))
    parsed = [
        sportsref.utils.parse_table(tbl)
        for tbl in doc('table.stats_table').items()
    ]
    # Each table's first column header names the split category; record
    # it in a 'split' column and rename the column itself to a common
    # 'split_value' so the tables can be stacked.
    normalized = []
    for tbl_df in parsed:
        category = tbl_df.columns[0]
        tbl_df = tbl_df.rename(columns={category: 'split_value'})
        tbl_df['split'] = category
        normalized.append(tbl_df)
    if not normalized:
        return pd.DataFrame()
    return pd.concat(normalized).reset_index(drop=True)
def get_html(url):
    """Gets the HTML for the given URL using a GET request.

    Requests are rate-limited: both a process-level and a thread-level
    lock are held while waiting out THROTTLE_DELAY since the previous
    request and while performing the fetch, so at most one request is in
    flight at a time across threads/processes sharing these locks.

    :url: the absolute URL of the desired page.
    :returns: a string of HTML.
    :raises ValueError: on any 4xx HTTP status code.
    """
    global last_request_time
    with throttle_process_lock:
        with throttle_thread_lock:
            # sleep until THROTTLE_DELAY secs have passed since last request
            wait_left = THROTTLE_DELAY - (time.time() - last_request_time.value)
            if wait_left > 0:
                time.sleep(wait_left)
            # make request
            response = requests.get(url)
            # update last request time for throttling
            last_request_time.value = time.time()
    # raise ValueError on 4xx status code, get rid of comments, and return
    if 400 <= response.status_code < 500:
        raise ValueError(
            'Status Code {} received fetching URL "{}"'
            .format(response.status_code, url)
        )
    html = response.text
    # strip HTML comment markers — the site wraps many tables inside
    # comments, and removing the markers exposes them to the parser
    html = html.replace('<!--', '').replace('-->', '')
    return html
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.