-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
EES-4727: Tidying up the data_ingestion code by adding types and splitting up methods for parsing content.
- Loading branch information
Showing
9 changed files
with
162 additions
and
148 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,60 +1,71 @@ | ||
import logging | ||
from typing import Dict, List | ||
|
||
import requests | ||
from bs4 import BeautifulSoup | ||
|
||
from ..config import settings | ||
from .tablebuilder_service import fetch_key_stat | ||
from .tablebuilder_service import fetch_data_block | ||
|
||
logger = logging.getLogger(__name__) | ||
|
||
|
||
def extract_releases(slugs: List[str]) -> List[Dict]:
    """Fetch the latest release for each publication slug and collect its text.

    Args:
        slugs: Publication slugs to look up via the content API.

    Returns:
        A list of dicts, each with a "text" entry (key statistics, when
        present, followed by the release content) and a "link" entry
        pointing at the public UI page for the release.
    """
    texts = []
    for slug in slugs:
        slug_info = {}
        res = requests.get(f"{settings.ees_url_api_content}/publications/{slug}/releases/latest")
        key_stats = {}
        response_json = res.json()
        release_id = response_json["publication"]["releases"][0]["id"]
        try:
            key_statistics = response_json["keyStatistics"]
            if key_statistics != []:
                data_strings = []
                for i, statistic in enumerate(key_statistics):
                    data_strings.append(fetch_key_stat(statistic, release_id, i))
                key_stats["data"] = "Key Statistics section: ".join(data_strings)
        except KeyError:
            # logger.warn is a deprecated alias; logger.warning is the supported API.
            logger.warning(f"{slug} doesnt contain key stats")
        try:
            # KeyError here means no key stats were collected above; fall back
            # to release content only in the except branch.
            slug_info["text"] = key_stats["data"]
            content = fetch_release(slug, response_json)
            slug_info["text"] += content["data"]
            slug_info["link"] = content["link"]
        except Exception:
            logger.warning(f"{slug} doesnt contain key stats")
            content = fetch_release(slug, response_json)
            slug_info["text"] = content["data"]
            slug_info["link"] = content["link"]
        texts.append(slug_info)
    return texts
|
||
|
||
def fetch_release(slug: str, res: dict) -> dict:
    """Build the public-UI link and textual content for a release.

    Args:
        slug: The publication slug (used to build the public link).
        res: The parsed JSON for the publication's latest release.

    Returns:
        A dict with "link" (public find-statistics URL) and "data"
        (headlines text, when present, followed by every content block's
        plain text).
    """
    slug_content = {}
    slug_content["link"] = f"{settings.ees_url_public_ui}/find-statistics/{slug}"
    try:
        slug_content["data"] = "Headlines Section: "
        slug_content["data"] += BeautifulSoup(res["headlinesSection"]["content"][0]["body"], "html.parser").get_text()
    except Exception as e:
        # Best-effort: a missing/empty headlines section is not fatal.
        logger.info(f" Error: {e}. For {slug} the headlines section doesnt exist")

    slug_content["data"] += "Content Section"
    # Iterate the sections/blocks directly instead of index loops; blocks
    # without a "body" key (e.g. non-HTML blocks) are skipped.
    for i, section in enumerate(res["content"]):
        for block in section["content"]:
            try:
                slug_content["data"] += BeautifulSoup(block["body"], "html.parser").get_text()
            except KeyError:
                logger.debug(f"Key does not exist for {slug} at {i}")
    return slug_content
def extract_releases(slugs: list[str]) -> list[dict[str, str]]:
    """Fetch and assemble release content for every publication slug given."""
    return [fetch_release(slug) for slug in slugs]
|
||
|
||
def fetch_release(slug: str) -> dict[str, str]:
    """Fetch the latest release for a publication and assemble its text.

    Args:
        slug: The publication slug to look up via the content API.

    Returns:
        A dict with "link" (public find-statistics URL) and "text"
        (headlines, key statistics and general content concatenated).

    Raises:
        requests.HTTPError: If the content API returns an error status.
    """
    response = requests.get(url=f"{settings.ees_url_api_content}/publications/{slug}/releases/latest")
    response.raise_for_status()
    response_json = response.json()
    release_id = response_json["id"]

    logger.debug(f"Processing content for release id: {release_id}")

    # The section helpers return None when their section is absent; fall back
    # to "" so the literal string "None" never ends up in the assembled text.
    headlines_content = get_headlines_content(res=response_json) or ""
    key_stats_content = get_key_statistics_content(release_id=release_id, res=response_json) or ""
    general_content = get_general_content(res=response_json)

    return {
        "link": f"{settings.ees_url_public_ui}/find-statistics/{slug}",
        "text": f"{headlines_content}{key_stats_content}{general_content}",
    }
|
||
|
||
def get_headlines_content(res: dict) -> str | None:
    """Return the headline text for a release, or None when there is none."""
    content_blocks = res["headlinesSection"]["content"]
    if not content_blocks:
        return None
    first_block = content_blocks[0]
    text = BeautifulSoup(markup=first_block["body"], features="html.parser").get_text()
    return f"Headline: {text}"
|
||
|
||
def get_key_statistics_content(release_id: str, res: dict) -> str | None:
    """Join the rendered key statistics for a release, or None when absent."""
    key_statistics = res["keyStatistics"]
    if not key_statistics:
        return None
    rendered = [
        get_key_statistic_content(release_id=release_id, index_and_key_statistic=pair)
        for pair in enumerate(key_statistics)
    ]
    return "Key statistic ".join(rendered)
|
||
|
||
def get_key_statistic_content(release_id: str, index_and_key_statistic: tuple[int, dict[str, str]]) -> str:
    """Render one key statistic via its backing data block."""
    position, statistic = index_and_key_statistic
    return fetch_data_block(
        release_id=release_id,
        data_block_id=statistic["dataBlockId"],
        key_statistic=statistic,
        index=position,
    )
|
||
|
||
def get_general_content(res: dict) -> str:
    """Concatenate the plain text of every HtmlBlock in the release content.

    Args:
        res: The parsed JSON for a release; "content" holds a list of
            sections, each with its own "content" list of blocks.

    Returns:
        "Content: " followed by the text of each HtmlBlock, in order.
    """
    # Iterate directly and build once with join instead of index loops and
    # repeated string concatenation.
    parts = ["Content: "]
    for section in res["content"]:
        for block in section["content"]:
            # Only HtmlBlock entries carry an HTML "body"; other block
            # types (e.g. data blocks) are skipped.
            if block["type"] == "HtmlBlock":
                parts.append(BeautifulSoup(markup=block["body"], features="html.parser").get_text())
    return "".join(parts)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.