Skip to content

Commit

Permalink
Merge pull request #170 from OCHA-DAP/HDXDSYS-1043-returnees
Browse files Browse the repository at this point in the history
HDXDSYS-1043 add returnees
  • Loading branch information
turnerm authored Sep 23, 2024
2 parents 27eda92 + 7341688 commit 68f11ef
Show file tree
Hide file tree
Showing 9 changed files with 229,775 additions and 42,334 deletions.
6 changes: 6 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,12 @@ All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

## [0.10.1] - 2024-09-20

### Changed

- Split refugees into refugees and returnees

## [0.10.0] - 2024-09-19

### Added
Expand Down
2 changes: 1 addition & 1 deletion src/hapi/pipelines/app/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -226,7 +226,7 @@ def main(
"operational_presence.yaml",
"population.yaml",
"poverty_rate.yaml",
"refugees.yaml",
"refugees_and_returnees.yaml",
"wfp.yaml",
]
project_config_dict = load_yamls(project_configs)
Expand Down
19 changes: 11 additions & 8 deletions src/hapi/pipelines/app/pipelines.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
from hapi.pipelines.database.org_type import OrgType
from hapi.pipelines.database.population import Population
from hapi.pipelines.database.poverty_rate import PovertyRate
from hapi.pipelines.database.refugees import Refugees
from hapi.pipelines.database.refugees_and_returnees import RefugeesAndReturnees
from hapi.pipelines.database.sector import Sector
from hapi.pipelines.database.wfp_commodity import WFPCommodity
from hapi.pipelines.database.wfp_market import WFPMarket
Expand Down Expand Up @@ -174,7 +174,7 @@ def _create_configurable_scrapers(
_create_configurable_scrapers("operational_presence", "national")
_create_configurable_scrapers("national_risk", "national")
_create_configurable_scrapers("funding", "national")
_create_configurable_scrapers("refugees", "national")
_create_configurable_scrapers("refugees_and_returnees", "national")
_create_configurable_scrapers("idps", "national")
_create_configurable_scrapers(
"idps", "adminone", adminlevel=self.adminone
Expand Down Expand Up @@ -265,18 +265,21 @@ def output_national_risk(self):
)
national_risk.populate()

def output_refugees_and_returnees(self):
    """Run the refugees-and-returnees theme and write results to the database.

    Does nothing unless the theme is selected: an empty/None
    ``self.themes_to_run`` means every theme runs, otherwise the
    "refugees_and_returnees" key must be present.

    Pulls the HAPI-formatted results from the configurable scrapers
    registered under "refugees_and_returnees", then delegates row
    creation to ``RefugeesAndReturnees.populate()``.
    """
    if (
        not self.themes_to_run
        or "refugees_and_returnees" in self.themes_to_run
    ):
        results = self.runner.get_hapi_results(
            self.configurable_scrapers["refugees_and_returnees"]
        )
        refugees_and_returnees = RefugeesAndReturnees(
            session=self.session,
            metadata=self.metadata,
            locations=self.locations,
            results=results,
        )
        refugees_and_returnees.populate()

def output_idps(self):
if not self.themes_to_run or "idps" in self.themes_to_run:
Expand Down Expand Up @@ -372,7 +375,7 @@ def output(self):
self.output_food_security()
self.output_humanitarian_needs()
self.output_national_risk()
self.output_refugees()
self.output_refugees_and_returnees()
self.output_idps()
self.output_funding()
self.output_poverty_rate()
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
refugees_national:
refugees:
refugees_and_returnees_national:
refugees_and_returnees:
dataset: "unhcr-population-data-for-world"
resource: "Demographics and locations of forcibly displaced and stateless persons (Global)"
format: "csv"
Expand Down
5 changes: 2 additions & 3 deletions src/hapi/pipelines/database/idps.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,9 +76,8 @@ def populate(self) -> None:
)
if duplicate_row_check in duplicate_rows:
text = (
f"Duplicate row for admin code {admin2_code}, assessment type {assessment_type}, "
f"reporting round {reporting_round}, operation {operation}, reporting round "
f"{reporting_round}"
f"Duplicate row for admin code {admin_code}, assessment type {assessment_type}, "
f"reporting round {reporting_round}, operation {operation}"
)
add_message(errors, dataset_name, text)
continue
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@
from typing import Dict

from hapi_schema.db_refugees import DBRefugees
from hapi_schema.db_returnees import DBReturnees
from hapi_schema.utils.enums import PopulationGroup
from hdx.utilities.dateparse import parse_date
from hdx.utilities.dictandlist import dict_of_lists_add
from sqlalchemy.orm import Session
Expand All @@ -19,7 +21,27 @@
logger = getLogger(__name__)


class Refugees(BaseUploader):
# UNHCR population-group codes written to the refugees table (DBRefugees).
# A row whose group is in neither this list nor RETURNEE_POPULATION_GROUPS
# is skipped by RefugeesAndReturnees.populate().
REFUGEE_POPULATION_GROUPS = [
    PopulationGroup.REFUGEES.value,
    PopulationGroup.ROC.value,  # NOTE(review): presumably "refugee-like"/others of concern — confirm against hapi_schema enum
    PopulationGroup.ASYLUM_SEEKERS.value,
    PopulationGroup.OIP.value,
    PopulationGroup.IOC.value,
    PopulationGroup.STATELESS.value,
    PopulationGroup.OOC.value,
    PopulationGroup.HOST_COMMUNITY.value,
    PopulationGroup.RESETTLED.value,
    PopulationGroup.NATURALIZED.value,
]

# Population-group codes written to the returnees table (DBReturnees).
RETURNEE_POPULATION_GROUPS = [
    PopulationGroup.RET.value,
    PopulationGroup.RDP.value,  # NOTE(review): presumably returned IDPs — confirm against hapi_schema enum
    PopulationGroup.RRI.value,
]


class RefugeesAndReturnees(BaseUploader):
def __init__(
self,
session: Session,
Expand Down Expand Up @@ -76,13 +98,19 @@ def populate(self) -> None:
),
int(population),
)

for key in rows:
refugees_row = DBRefugees(
population_group = key[2]
if population_group in REFUGEE_POPULATION_GROUPS:
db_table = DBRefugees
elif population_group in RETURNEE_POPULATION_GROUPS:
db_table = DBReturnees
else:
continue
refugees_and_returnees_row = db_table(
resource_hdx_id=resource_id,
origin_location_ref=self._locations.data[key[0]],
asylum_location_ref=self._locations.data[key[1]],
population_group=key[2],
population_group=population_group,
gender=gender,
age_range=age_range,
min_age=min_age,
Expand All @@ -96,5 +124,5 @@ def populate(self) -> None:
),
)

self._session.add(refugees_row)
self._session.add(refugees_and_returnees_row)
self._session.commit()
Loading

0 comments on commit 68f11ef

Please sign in to comment.