Commit 0bc9908
fixed report settings typo and feature data handling logic
uberfastman committed Oct 9, 2024
1 parent 4f0cacf commit 0bc9908
Showing 4 changed files with 28 additions and 47 deletions.
compose.yaml (2 changes: 1 addition & 1 deletion)
@@ -2,7 +2,7 @@
 services:
 
   app:
-    image: ghcr.io/uberfastman/fantasy-football-metrics-weekly-report:19.0.1
+    image: ghcr.io/uberfastman/fantasy-football-metrics-weekly-report:19.0.2
     platform: linux/amd64
     ports:
       - "5001:5000"
features/base/feature.py (31 changes: 21 additions & 10 deletions)
@@ -4,6 +4,7 @@
 import json
 import os
 from abc import ABC, abstractmethod
+from datetime import datetime
 from pathlib import Path
 from typing import Dict, List, Any
@@ -40,26 +41,36 @@ def __init__(self, feature_type: str, feature_web_base_url: str, data_dir: Path,
         self.player_name_punctuation: List[str] = [".", "'"]
         self.player_name_suffixes: List[str] = ["Jr", "Sr", "V", "IV", "III", "II", "I"] # ordered for str.removesuffix
 
-        # fetch feature data from the web if not running in offline mode or if refresh=True
-        if not self.offline and self.refresh:
-            if not self.feature_data:
-                logger.debug(f"Retrieving {self.feature_type_title} data from the web.")
+        start = datetime.now()
+
+        data_retrieved_from_web = False
+        if not self.offline:
+            if not self.feature_data_file_path.is_file() or self.refresh:
+                logger.info(f"Retrieving {self.feature_type_title} data from {self.feature_web_base_url}...")
+                # fetch feature data from the web
                 self._get_feature_data()
+                data_retrieved_from_web = True
 
                 if self.save_data:
                     self._save_feature_data()
-        # if offline=True or refresh=False load saved feature data (must have previously run application with -s flag)
+            else:
+                # load saved feature data (must have previously run application with -s flag)
+                self._load_feature_data()
         else:
+            # load saved feature data (must have previously run application with -s flag)
             self._load_feature_data()
 
         if len(self.feature_data) == 0:
             logger.warning(
-                f"No {self.feature_type_title} data records were loaded, please check your internet connection or the "
-                f"availability of {self.feature_web_base_url} and try generating a new report."
+                f"...{'retrieved' if data_retrieved_from_web else 'loaded'} 0 {self.feature_type_title} data records. "
+                f"Please check your internet connection or the availability of {self.feature_web_base_url} and try "
+                f"generating a new report."
             )
         else:
-            logger.info(f"{len(self.feature_data)} feature data records loaded")
+            logger.info(
+                f"...{'retrieved' if data_retrieved_from_web else 'loaded'} {len(self.feature_data)} "
+                f"{self.feature_type_title} data records in {datetime.now() - start}."
+            )
 
     def __str__(self):
         return json.dumps(self.feature_data, indent=2, ensure_ascii=False)
@@ -68,7 +79,7 @@ def __repr__(self):
         return json.dumps(self.feature_data, indent=2, ensure_ascii=False)
 
     def _load_feature_data(self) -> None:
-        logger.debug(f"Loading saved {self.feature_type_title} data...")
+        logger.info(f"Loading saved {self.feature_type_title} data...")
 
         if self.feature_data_file_path.is_file():
             with open(self.feature_data_file_path, "r", encoding="utf-8") as feature_data_in:
@@ -80,7 +91,7 @@ def _load_feature_data(self) -> None:
                 )
 
     def _save_feature_data(self) -> None:
-        logger.debug(f"Saving {self.feature_type_title} data and raw {self.feature_type_title} data.")
+        logger.debug(f"Saving {self.feature_type_title} data...")
 
         # create output data directory if it does not exist
        if not self.data_dir.is_dir():
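The net effect of the feature.py changes above: the base feature constructor now times its own data retrieval, fetches from the web whenever the app is online and the cached file is missing or a refresh was requested, and otherwise falls back to previously saved data. Below is a minimal, self-contained sketch that mirrors that control flow; the function name and parameters are hypothetical stand-ins for illustration, not the project's actual attributes or methods.

    import json
    from datetime import datetime
    from pathlib import Path
    from typing import Any, Callable, Dict, List

    def load_or_fetch_feature_data(
        cache_file: Path,
        fetch_from_web: Callable[[], List[Dict[str, Any]]],
        offline: bool = False,
        refresh: bool = False,
        save: bool = False,
    ) -> List[Dict[str, Any]]:
        # mirror of the constructor logic: fetch when online and the cache is
        # missing or a refresh was requested; otherwise load the saved file
        start = datetime.now()
        retrieved_from_web = False
        if not offline and (not cache_file.is_file() or refresh):
            data = fetch_from_web()
            retrieved_from_web = True
            if save:
                # persist the fetched records so later runs can load them offline
                cache_file.write_text(json.dumps(data), encoding="utf-8")
        else:
            data = json.loads(cache_file.read_text(encoding="utf-8")) if cache_file.is_file() else []
        action = "retrieved" if retrieved_from_web else "loaded"
        print(f"...{action} {len(data)} data records in {datetime.now() - start}.")
        return data

    # example usage with a dummy fetcher (writes feature_data.json to the working directory)
    records = load_or_fetch_feature_data(Path("feature_data.json"), lambda: [{"player": "example"}], save=True)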
report/builder.py (40 changes: 5 additions & 35 deletions)
@@ -1,9 +1,9 @@
 __author__ = "Wren J. R. (uberfastman)"
 __email__ = "[email protected]"
 
-import datetime
 import os
 from collections import defaultdict
+from datetime import datetime
 from pathlib import Path
 from typing import List

@@ -92,10 +92,10 @@ def __init__(self,
             f"{f' dq_ce: {self.dq_ce}{f_str_newline}'}"
             f"{f' offline: {self.offline}{f_str_newline}'}"
             f"{f' test: {self.test}{f_str_newline}'}"
-            f"on {datetime.datetime.now():%b %d, %Y}..."
+            f"on {datetime.now():%b %d, %Y}..."
         )
 
-        begin = datetime.datetime.now()
+        begin = datetime.now()
         logger.info(
             f"Retrieving fantasy football data from {self.platform_display} {'API' if not self.offline else 'saved data'}..."
         )
@@ -114,7 +114,7 @@ def __init__(
             offline=self.offline
         )
 
-        delta = datetime.datetime.now() - begin
+        delta = datetime.now() - begin
         logger.info(
             f"...retrieved all fantasy football data from "
             f"{self.platform_display + (' API' if not self.offline else ' saved data')} in {delta}\n"
@@ -127,49 +127,19 @@ def __init__(
         self.playoff_probs = None
 
         if settings.report_settings.league_bad_boy_rankings_bool:
-            begin = datetime.datetime.now()
-            logger.info(
-                f"Retrieving bad boy data from https://www.usatoday.com/sports/nfl/arrests/ "
-                f"{'website' if not self.offline or self.refresh_web_data else 'saved data'}..."
-            )
             self.bad_boy_stats = self.league.get_bad_boy_stats(self.refresh_web_data, self.save_data, self.offline)
-            delta = datetime.datetime.now() - begin
-            logger.info(
-                f"...retrieved all bad boy data from https://www.usatoday.com/sports/nfl/arrests/ "
-                f"{'website' if not self.offline else 'saved data'} in {delta}\n"
-            )
         else:
             self.bad_boy_stats = None
 
         if settings.report_settings.league_beef_rankings_bool:
-            begin = datetime.datetime.now()
-            logger.info(
-                f"Retrieving beef data from Sleeper "
-                f"{'API' if not self.offline or self.refresh_web_data else 'saved data'}..."
-            )
             self.beef_stats = self.league.get_beef_stats(self.refresh_web_data, self.save_data, self.offline)
-            delta = datetime.datetime.now() - begin
-            logger.info(
-                f"...retrieved all beef data from Sleeper "
-                f"{'API' if not self.offline else 'saved data'} in {delta}\n"
-            )
         else:
             self.beef_stats = None
 
         if settings.report_settings.league_high_roller_rankings_bool:
-            begin = datetime.datetime.now()
-            logger.info(
-                f"Retrieving high roller data from https://www.spotrac.com/nfl/fines "
-                f"{'website' if not self.offline or self.refresh_web_data else 'saved data'}..."
-            )
             self.high_roller_stats = self.league.get_high_roller_stats(
                 self.refresh_web_data, self.save_data, self.offline
             )
-            delta = datetime.datetime.now() - begin
-            logger.info(
-                f"...retrieved all high roller data from https://www.spotrac.com/nfl/fines "
-                f"{'website' if not self.offline else 'saved data'} in {delta}\n"
-            )
         else:
             self.high_roller_stats = None
 
@@ -394,7 +364,7 @@ def create_pdf_report(self) -> Path:
         report_title_text = f"{self.league.name} ({self.league_id}) Week {self.league.week_for_report} Report"
         report_footer_text = (
             f"<para alignment='center'>"
-            f"Report generated {datetime.datetime.now():%Y-%b-%d %H:%M:%S} for {self.platform_display} "
+            f"Report generated {datetime.now():%Y-%b-%d %H:%M:%S} for {self.platform_display} "
             f"Fantasy Football league \"{self.league.name}\" with id {self.league_id} "
             f"(<a href=\"{self.league.url}\" color=blue><u>{self.league.url}</u></a>)."
             f"<br></br><br></br><br></br>"
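Two things happen in report/builder.py: the module-level "import datetime" becomes "from datetime import datetime", so every call site drops one level of qualification, and the per-feature timing and logging blocks around the bad boy, beef, and high roller lookups are removed, since the base feature class above now logs record counts and elapsed time itself. A small sketch of the import change, using throwaway variables purely for illustration:

    from datetime import datetime

    # with "import datetime" this line would have to read datetime.datetime.now()
    begin = datetime.now()
    # ... fantasy football data retrieval would happen here ...
    delta = datetime.now() - begin
    print(f"...retrieved all fantasy football data in {delta}")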
report/pdf/generator.py (2 changes: 1 addition & 1 deletion)
@@ -1783,7 +1783,7 @@ def generate_pdf(self, filename_with_path: Path, line_chart_data_list: List[List
 
         if (settings.report_settings.league_bad_boy_rankings_bool
                 or settings.report_settings.league_beef_rankings_bool
-                or settings.reportsettings.league_high_roller_rankings_bool):
+                or settings.report_settings.league_high_roller_rankings_bool):
             elements.append(self.add_page_break())
 
         if settings.report_settings.league_weekly_top_scorers_bool:
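The one-character fix above matters because settings.reportsettings is not a real attribute, so the original condition would raise an AttributeError at report time, though only when the first two flags are both False, since "or" short-circuits before evaluating the third operand. A tiny standalone demonstration with a hypothetical stand-in settings object (not the project's actual Settings class):

    # hypothetical stand-ins used only to show the short-circuit behavior
    class ReportSettings:
        league_bad_boy_rankings_bool = False
        league_beef_rankings_bool = False
        league_high_roller_rankings_bool = True

    class Settings:
        report_settings = ReportSettings()

    settings = Settings()

    # the misspelled attribute only blows up when both earlier flags are False
    try:
        if (settings.report_settings.league_bad_boy_rankings_bool
                or settings.report_settings.league_beef_rankings_bool
                or settings.reportsettings.league_high_roller_rankings_bool):  # typo: reportsettings
            print("page break added")
    except AttributeError as err:
        print(f"AttributeError: {err}")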
