Merge pull request #74 from OCHA-DAP/HAPI-337
HAPI-337
b-j-mills authored Feb 8, 2024
2 parents 43eedbb + 85dca01 commit 7970359
Showing 7 changed files with 60 additions and 55 deletions.
2 changes: 1 addition & 1 deletion .config/ruff.toml
@@ -13,4 +13,4 @@ ignore = [
]

[lint.isort]
known-local-folder = ["hapi"]
known-local-folder = ["hapi"]
10 changes: 10 additions & 0 deletions CHANGELOG.md
@@ -4,6 +4,16 @@ All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

## [0.7.1] - 2024-02-07

### Added

- Added all HRP countries to national risk theme

### Changed

- Linked national risk to admin 2 level

## [0.7.0] - 2024-01-30

### Added
30 changes: 12 additions & 18 deletions requirements.txt
@@ -30,9 +30,7 @@ click==8.1.7
colorama==0.4.6
# via typer
coverage[toml]==7.4.1
# via
# coverage
# pytest-cov
# via pytest-cov
cryptography==42.0.2
# via pyopenssl
defopt==6.4.0
@@ -63,9 +61,9 @@ google-auth-oauthlib==1.2.0
# via gspread
greenlet==3.0.3
# via sqlalchemy
gspread==6.0.0
gspread==6.0.1
# via hdx-python-scraper
hapi-schema==0.6.0
hapi-schema==0.6.1
# via hapi-pipelines (pyproject.toml)
hdx-python-api==6.2.1
# via hdx-python-scraper
@@ -75,9 +73,7 @@ hdx-python-country==3.6.4
# hdx-python-api
# hdx-python-scraper
hdx-python-database[postgresql]==1.2.9
# via
# hapi-pipelines (pyproject.toml)
# hdx-python-database
# via hapi-pipelines (pyproject.toml)
hdx-python-scraper==2.3.5
# via hapi-pipelines (pyproject.toml)
hdx-python-utilities==3.6.4
@@ -110,7 +106,7 @@ jsonschema==4.17.3
# via
# frictionless
# tableschema-to-template
libhxl==5.2
libhxl==5.2.1
# via
# hapi-pipelines (pyproject.toml)
# hdx-python-api
@@ -123,7 +119,7 @@ markdown-it-py==3.0.0
# via rich
marko==2.0.2
# via frictionless
markupsafe==2.1.4
markupsafe==2.1.5
# via jinja2
mdurl==0.1.2
# via markdown-it-py
@@ -153,9 +149,9 @@ pockets==0.9.1
# via sphinxcontrib-napoleon
pre-commit==3.6.0
# via hapi-pipelines (pyproject.toml)
psycopg[binary]==3.1.17
psycopg[binary]==3.1.18
# via hdx-python-database
psycopg-binary==3.1.17
psycopg-binary==3.1.18
# via psycopg
pyasn1==0.5.1
# via
@@ -167,11 +163,11 @@ pyasn1-modules==0.3.0
# via google-auth
pycparser==2.21
# via cffi
pydantic==2.6.0
pydantic==2.6.1
# via
# frictionless
# inflect
pydantic-core==2.16.1
pydantic-core==2.16.2
# via pydantic
pygments==2.17.2
# via rich
@@ -229,7 +225,7 @@ rich==13.7.0
# via typer
rsa==4.9
# via google-auth
ruamel-yaml==0.18.5
ruamel-yaml==0.18.6
# via hdx-python-utilities
ruamel-yaml-clib==0.2.8
# via ruamel-yaml
@@ -264,9 +260,7 @@ tabulate==0.9.0
text-unidecode==1.3
# via python-slugify
typer[all]==0.9.0
# via
# frictionless
# typer
# via frictionless
typing-extensions==4.9.0
# via
# frictionless
2 changes: 1 addition & 1 deletion src/hapi/pipelines/app/pipelines.py
@@ -273,7 +273,7 @@ def output(self):
national_risk = NationalRisk(
session=self.session,
metadata=self.metadata,
locations=self.locations,
admins=self.admins,
results=results,
)
national_risk.populate()
2 changes: 1 addition & 1 deletion src/hapi/pipelines/configs/national_risk.yaml
@@ -12,7 +12,7 @@ national_risk_national:
end: "31/12/2024"
filter_cols:
- "ISO3"
prefilter: "ISO3 in ['AFG', 'BFA', 'MLI', 'NGA', 'TCD', 'YEM']"
prefilter: "ISO3 in ['AFG', 'BFA', 'CAF', 'CMR', 'COD', 'COL', 'ETH', 'GTM', 'HND', 'HTI', 'MLI', 'MMR', 'MOZ', 'NER', 'NGA', 'PSE', 'SDN', 'SLV', 'SOM', 'SSD', 'SYR', 'TCD', 'UKR', 'VEN', 'YEM']"
admin:
- "ISO3"
admin_exact: True
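The prefilter now covers all 25 HRP countries instead of the original six. As a rough illustration only (hdx-python-scraper evaluates the prefilter string itself; the helper below is a hypothetical stand-in), the effect is to keep only rows whose ISO3 column is in the allowed list:

```python
# Illustrative sketch of the prefilter's effect; not the scraper's implementation.
HRP_ISO3S = [
    "AFG", "BFA", "CAF", "CMR", "COD", "COL", "ETH", "GTM", "HND", "HTI",
    "MLI", "MMR", "MOZ", "NER", "NGA", "PSE", "SDN", "SLV", "SOM", "SSD",
    "SYR", "TCD", "UKR", "VEN", "YEM",
]

def prefilter_rows(rows, iso3s=HRP_ISO3S):
    """Keep only rows whose ISO3 value is in the allowed country list."""
    return [row for row in rows if row.get("ISO3") in iso3s]

# Example: only the AFG row survives the filter.
sample = [
    {"ISO3": "AFG", "#risk+class": "Very High"},
    {"ISO3": "FJI", "#risk+class": "Low"},
]
print(prefilter_rows(sample))  # [{'ISO3': 'AFG', '#risk+class': 'Very High'}]
```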
67 changes: 34 additions & 33 deletions src/hapi/pipelines/database/national_risk.py
@@ -6,7 +6,7 @@
from hapi_schema.db_national_risk import DBNationalRisk
from sqlalchemy.orm import Session

from . import locations
from . import admins
from .base_uploader import BaseUploader
from .metadata import Metadata

@@ -18,59 +18,60 @@ def __init__(
self,
session: Session,
metadata: Metadata,
locations: locations.Locations,
admins: admins.Admins,
results: Dict,
):
super().__init__(session)
self._metadata = metadata
self._locations = locations
self._admins = admins
self._results = results

def populate(self):
logger.info("Populating national risk table")
for dataset in self._results.values():
datasetinfo = self._metadata.runner.scrapers[
"national_risk_national"
].datasetinfo
time_period_start = datasetinfo["source_date"]["default_date"][
"start"
]
time_period_end = datasetinfo["source_date"]["default_date"]["end"]
time_period_start = dataset["time_period"]["start"]
time_period_end = dataset["time_period"]["end"]
for admin_level, admin_results in dataset["results"].items():
resource_id = admin_results["hapi_resource_metadata"]["hdx_id"]
hxl_tags = admin_results["headers"][1]
locations = list(admin_results["values"][0].keys())
values = {
hxl_tag: value
for hxl_tag, value in zip(
hxl_tags, admin_results["values"]
)
}
admin_codes = list(admin_results["values"][0].keys())
values = admin_results["values"]

for location in locations:
risk_class = values["#risk+class"].get(location)
for admin_code in admin_codes:
admin2_code = admins.get_admin2_code_based_on_level(
admin_code=admin_code, admin_level=admin_level
)
risk_class = values[hxl_tags.index("#risk+class")].get(
admin_code
)
if risk_class:
risk_class = _get_risk_class_code_from_data(risk_class)

national_risk_row = DBNationalRisk(
resource_ref=self._metadata.resource_data[resource_id],
location_ref=self._locations.data[location],
admin2_ref=self._admins.admin2_data[admin2_code],
risk_class=risk_class,
global_rank=values["#risk+rank"][location],
overall_risk=values["#risk+total"][location],
hazard_exposure_risk=values["#risk+hazard"][location],
vulnerability_risk=values["#risk+vulnerability"][
location
],
coping_capacity_risk=values["#risk+coping+capacity"][
location
],
global_rank=values[hxl_tags.index("#risk+rank")].get(
admin_code
),
overall_risk=values[hxl_tags.index("#risk+total")].get(
admin_code
),
hazard_exposure_risk=values[
hxl_tags.index("#risk+hazard")
].get(admin_code),
vulnerability_risk=values[
hxl_tags.index("#risk+vulnerability")
].get(admin_code),
coping_capacity_risk=values[
hxl_tags.index("#risk+coping+capacity")
].get(admin_code),
meta_missing_indicators_pct=values[
"#meta+missing+indicators+pct"
].get(location),
hxl_tags.index("#meta+missing+indicators+pct")
].get(admin_code),
meta_avg_recentness_years=values[
"#meta+recentness+avg"
].get(location),
hxl_tags.index("#meta+recentness+avg")
].get(admin_code),
reference_period_start=time_period_start,
reference_period_end=time_period_end,
# TODO: For v2+, add to scraper (HAPI-199)
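The uploader no longer rebuilds a dict keyed by HXL tag; it keeps the scraper output as a list of columns and indexes into it by tag position, then resolves each admin code to an admin2 reference via admins.get_admin2_code_based_on_level. A minimal sketch of that lookup pattern, with made-up sample data standing in for admin_results["values"]:

```python
# Illustrative only: mirrors values[hxl_tags.index(tag)].get(admin_code) from
# populate() above. The sample columns are invented; real data comes from the scraper.
hxl_tags = ["#risk+class", "#risk+rank", "#risk+total"]
values = [
    {"AFG": "Very High", "YEM": "Very High"},  # column for #risk+class
    {"AFG": 4, "YEM": 1},                      # column for #risk+rank
    {"AFG": 8.1, "YEM": 8.9},                  # column for #risk+total
]

def get_value(tag: str, admin_code: str):
    """Find the tag's column by position, then look up the admin code's value."""
    return values[hxl_tags.index(tag)].get(admin_code)

for admin_code in values[0]:
    print(admin_code, get_value("#risk+class", admin_code), get_value("#risk+total", admin_code))
```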
2 changes: 1 addition & 1 deletion tests/test_main.py
@@ -156,7 +156,7 @@ def test_pipelines(self, configuration, folder):
count = session.scalar(
select(func.count(DBNationalRisk.id))
)
assert count == 6
assert count == 25

org_mapping = pipelines.org._org_lookup
assert org_mapping["Action against Hunger"] == {
