Commit

Simplify national risk
b-j-mills committed Jan 18, 2024
1 parent 0177626 commit d8f752d
Showing 1 changed file with 26 additions and 22 deletions.
48 changes: 26 additions & 22 deletions src/hapi/pipelines/database/national_risk.py
Expand Up @@ -4,7 +4,6 @@
from typing import Dict

from hapi_schema.db_national_risk import DBNationalRisk
from hdx.utilities.dictandlist import dict_of_dicts_add
from sqlalchemy.orm import Session

from . import locations
@@ -40,35 +39,40 @@ def populate(self):
                 "end"
             ]
             for admin_level, admin_results in dataset["results"].items():
-                rows = dict()
                 resource_id = admin_results["hapi_resource_metadata"]["hdx_id"]
-                for hxl_tag, values in zip(
-                    admin_results["headers"][1], admin_results["values"]
-                ):
-                    for admin_code, value in values.items():
-                        if hxl_tag == "#risk+class":
-                            value = _get_risk_class_code_from_data(value)
-                        dict_of_dicts_add(rows, admin_code, hxl_tag, value)
-                for location in rows:
+                hxl_tags = admin_results["headers"][1]
+                locations = list(admin_results["values"][0].keys())
+                values = {
+                    hxl_tag: value
+                    for hxl_tag, value in zip(
+                        hxl_tags, admin_results["values"]
+                    )
+                }
+
+                for location in locations:
+                    risk_class = values["#risk+class"].get(location)
+                    if risk_class:
+                        risk_class = _get_risk_class_code_from_data(risk_class)
+
                     national_risk_row = DBNationalRisk(
                         resource_ref=self._metadata.resource_data[resource_id],
                         location_ref=self._locations.data[location],
-                        risk_class=rows[location]["#risk+class"],
-                        global_rank=rows[location]["#risk+rank"],
-                        overall_risk=rows[location]["#risk+total"],
-                        hazard_exposure_risk=rows[location]["#risk+hazard"],
-                        vulnerability_risk=rows[location][
-                            "#risk+vulnerability"
+                        risk_class=risk_class,
+                        global_rank=values["#risk+rank"][location],
+                        overall_risk=values["#risk+total"][location],
+                        hazard_exposure_risk=values["#risk+hazard"][location],
+                        vulnerability_risk=values["#risk+vulnerability"][
+                            location
                         ],
-                        coping_capacity_risk=rows[location][
-                            "#risk+coping+capacity"
+                        coping_capacity_risk=values["#risk+coping+capacity"][
+                            location
                         ],
-                        meta_missing_indicators_pct=rows[location].get(
+                        meta_missing_indicators_pct=values[
                             "#meta+missing+indicators+pct"
-                        ),
-                        meta_avg_recentness_years=rows[location].get(
+                        ].get(location),
+                        meta_avg_recentness_years=values[
                             "#meta+recentness+avg"
-                        ),
+                        ].get(location),
                         reference_period_start=reference_period_start,
                         reference_period_end=reference_period_end,
                         # TODO: For v2+, add to scraper (HAPI-199)
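
As a reading aid, not part of the commit: the refactor drops the per-row dict_of_dicts_add accumulation and instead reshapes the scraper output once into a lookup of HXL tag to {location: value}, which the DBNationalRisk rows then index directly. Below is a minimal sketch of that reshaping; the admin_results literal is an illustrative assumption about the data shape (headers[1] holding HXL tags, values holding one per-location dict per tag), not data taken from the pipeline.

# Hypothetical input shaped the way the new code expects; not real pipeline data.
admin_results = {
    "headers": (["Risk class", "Rank"], ["#risk+class", "#risk+rank"]),
    "values": [
        {"AFG": "Very High", "TCD": "High"},  # values for #risk+class
        {"AFG": 4, "TCD": 10},  # values for #risk+rank
    ],
}

hxl_tags = admin_results["headers"][1]
# Every per-tag dict is keyed by the same location codes, so the first one
# is enough to enumerate the locations.
locations = list(admin_results["values"][0].keys())
# Map each HXL tag to its {location: value} dict for direct lookups.
values = {
    hxl_tag: value
    for hxl_tag, value in zip(hxl_tags, admin_results["values"])
}

for location in locations:
    # .get() tolerates a missing risk class, mirroring the new handling above.
    print(location, values["#risk+class"].get(location), values["#risk+rank"][location])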
