diff --git a/hdx_hapi/config/__init__.py b/hdx_hapi/config/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/hdx_hapi/config/config.py b/hdx_hapi/config/config.py
index 7027dc43..258cb4fa 100644
--- a/hdx_hapi/config/config.py
+++ b/hdx_hapi/config/config.py
@@ -1,8 +1,9 @@
-from dataclasses import dataclass
-from mixpanel import Mixpanel
import logging
import os
+from dataclasses import dataclass
+from mixpanel import Mixpanel
+
from hdx_hapi.config.helper import create_pg_uri_from_env_without_protocol
logger = logging.getLogger(__name__)
@@ -18,6 +19,7 @@ class Config:
HDX_DATASET_URL: str
HDX_DATASET_API_URL: str
HDX_ORGANIZATION_URL: str
+ HDX_ORGANIZATION_API_URL: str
HDX_RESOURCE_URL: str
HDX_RESOURCE_API_URL: str
@@ -28,7 +30,7 @@ class Config:
HAPI_IDENTIFIER_FILTERING: bool
- HDX_MIXPANEL_TOKEN: str
+    MIXPANEL: 'Mixpanel | None'
CONFIG = None
@@ -41,6 +43,7 @@ def get_config() -> Config:
sql_alchemy_asyncypg_db_uri = f'postgresql+asyncpg://{db_uri_without_protocol}'
sql_alchemy_psycopg2_db_uri = f'postgresql+psycopg2://{db_uri_without_protocol}'
+ mixpanel_token = os.getenv('HDX_MIXPANEL_TOKEN', '')
CONFIG = Config(
SQL_ALCHEMY_ASYNCPG_DB_URI=sql_alchemy_asyncypg_db_uri,
SQL_ALCHEMY_PSYCOPG2_DB_URI=sql_alchemy_psycopg2_db_uri,
@@ -52,14 +55,15 @@ def get_config() -> Config:
'HDX_RESOURCE_API_URL', '{domain}/api/action/resource_show?id={resource_id}'
),
HDX_ORGANIZATION_URL=os.getenv('HDX_ORGANIZATION_URL', '{domain}/organization/{org_id}'),
+ HDX_ORGANIZATION_API_URL=os.getenv(
+ 'HDX_ORGANIZATION_API_URL', '{domain}/api/action/organization_show?id={org_id}'
+ ),
HAPI_READTHEDOCS_OVERVIEW_URL=os.getenv(
'HAPI_READTHEDOCS_OVERVIEW_URL', 'https://hdx-hapi.readthedocs.io/en/latest/'
),
HAPI_SERVER_URL=os.getenv('HAPI_SERVER_URL', None),
HAPI_IDENTIFIER_FILTERING=os.getenv('HAPI_IDENTIFIER_FILTERING', 'True').lower() == 'true',
- HDX_MIXPANEL_TOKEN=os.getenv('HDX_MIXPANEL_TOKEN', ''),
+ MIXPANEL=Mixpanel(mixpanel_token) if mixpanel_token else None,
)
return CONFIG
-
-mixpanel = Mixpanel(get_config().HDX_MIXPANEL_TOKEN)
diff --git a/hdx_hapi/config/doc_snippets.py b/hdx_hapi/config/doc_snippets.py
index 457a1891..bf30b338 100644
--- a/hdx_hapi/config/doc_snippets.py
+++ b/hdx_hapi/config/doc_snippets.py
@@ -7,9 +7,10 @@
DOC_ADMIN2_CODE = 'Filter the response by the 2nd subnational administrative divisions. The admin2 codes refer to the p-codes in the Common Operational Datasets.'
DOC_ADMIN2_NAME = 'Filter the response by the 1st subnational administrative divisions. The admin2 names refer to the Common Operational Datasets.'
DOC_AGE_RANGE_SUMMARY = 'Get the list of age ranges used for disaggregating population data'
-DOC_AGE_RANGE_CODE = 'Filter the response by the age range. These are expressed as [start year]-[end year]. The end year is assumed to be inclusive, though that is not always explicit in the source data.'
+DOC_AGE_RANGE = 'Filter the response by the age range. These are expressed as [start year]-[end year]. The end year is assumed to be inclusive, though that is not always explicit in the source data.'
DOC_GENDER_SUMMARY = 'Get the list of gender codes used for disaggregating population data'
DOC_GENDER_CODE = 'Filter the response by the gender code.'
+DOC_GENDER = 'Filter the response by the gender.'
DOC_GENDER_DESCRIPTION = 'Filter the response by the gender description.'
DOC_HDX_DATASET_ID = 'Filter the response by the dataset ID, which is a unique and fixed identifier of a Dataset on HDX. A URL in the pattern of `https://data.humdata.org/dataset/[dataset id]` will load the dataset page on HDX.'
DOC_HDX_DATASET_NAME = 'Filter the response by the URL-safe name of the dataset as displayed on HDX. This name is unique but can change. A URL in the pattern of `https://data.humdata.org/dataset/[dataset name]` will load the dataset page on HDX.'
diff --git a/hdx_hapi/db/dao/admin1_view_dao.py b/hdx_hapi/db/dao/admin1_view_dao.py
index 017454ad..ea36847b 100644
--- a/hdx_hapi/db/dao/admin1_view_dao.py
+++ b/hdx_hapi/db/dao/admin1_view_dao.py
@@ -1,12 +1,11 @@
-from datetime import datetime
import logging
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
-from hdx_hapi.db.models.views.db_admin1_view import Admin1View
-from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter
-from hdx_hapi.endpoints.util.util import PaginationParams
+from hdx_hapi.db.models.views.all_views import Admin1View
+from hdx_hapi.db.dao.util.util import apply_pagination, apply_reference_period_filter, case_insensitive_filter
+from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters
logger = logging.getLogger(__name__)
@@ -14,13 +13,10 @@
async def admin1_view_list(
pagination_parameters: PaginationParams,
+ ref_period_parameters: ReferencePeriodParameters,
db: AsyncSession,
code: str = None,
name: str = None,
- hapi_updated_date_min: datetime = None,
- hapi_updated_date_max: datetime = None,
- hapi_replaced_date_min: datetime = None,
- hapi_replaced_date_max: datetime = None,
location_code: str = None,
location_name: str = None,
):
@@ -37,19 +33,13 @@ async def admin1_view_list(
query = case_insensitive_filter(query, Admin1View.code, code)
if name:
query = query.where(Admin1View.name.icontains(name))
- if hapi_updated_date_min:
- query = query.where(Admin1View.hapi_updated_date >= hapi_updated_date_min)
- if hapi_updated_date_max:
- query = query.where(Admin1View.hapi_updated_date < hapi_updated_date_max)
- if hapi_replaced_date_min:
- query = query.where(Admin1View.hapi_replaced_date >= hapi_replaced_date_min)
- if hapi_replaced_date_max:
- query = query.where(Admin1View.hapi_replaced_date < hapi_replaced_date_max)
if location_code:
query = case_insensitive_filter(query, Admin1View.location_code, location_code)
if location_name:
query = query.where(Admin1View.location_name.icontains(location_name))
+ query = apply_reference_period_filter(query, ref_period_parameters, Admin1View)
+
query = apply_pagination(query, pagination_parameters)
logger.debug(f'Executing SQL query: {query}')
diff --git a/hdx_hapi/db/dao/admin2_view_dao.py b/hdx_hapi/db/dao/admin2_view_dao.py
index c6bda041..3c6e58bb 100644
--- a/hdx_hapi/db/dao/admin2_view_dao.py
+++ b/hdx_hapi/db/dao/admin2_view_dao.py
@@ -1,12 +1,11 @@
-from datetime import datetime
import logging
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
-from hdx_hapi.db.models.views.db_admin2_view import Admin2View
-from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter
-from hdx_hapi.endpoints.util.util import PaginationParams
+from hdx_hapi.db.models.views.all_views import Admin2View
+from hdx_hapi.db.dao.util.util import apply_pagination, apply_reference_period_filter, case_insensitive_filter
+from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters
logger = logging.getLogger(__name__)
@@ -14,13 +13,10 @@
async def admin2_view_list(
pagination_parameters: PaginationParams,
+ ref_period_parameters: ReferencePeriodParameters,
db: AsyncSession,
code: str = None,
name: str = None,
- hapi_updated_date_min: datetime = None,
- hapi_updated_date_max: datetime = None,
- hapi_replaced_date_min: datetime = None,
- hapi_replaced_date_max: datetime = None,
admin1_code: str = None,
admin1_name: str = None,
location_code: str = None,
@@ -39,14 +35,6 @@ async def admin2_view_list(
query = case_insensitive_filter(query, Admin2View.code, code)
if name:
query = query.where(Admin2View.name.icontains(name))
- if hapi_updated_date_min:
- query = query.where(Admin2View.hapi_updated_date >= hapi_updated_date_min)
- if hapi_updated_date_max:
- query = query.where(Admin2View.hapi_updated_date < hapi_updated_date_max)
- if hapi_replaced_date_min:
- query = query.where(Admin2View.hapi_replaced_date >= hapi_replaced_date_min)
- if hapi_replaced_date_max:
- query = query.where(Admin2View.hapi_replaced_date < hapi_replaced_date_max)
if admin1_code:
query = case_insensitive_filter(query, Admin2View.admin1_code, admin1_code)
if admin1_name:
@@ -56,6 +44,8 @@ async def admin2_view_list(
if location_name:
query = query.where(Admin2View.location_name.icontains(location_name))
+ query = apply_reference_period_filter(query, ref_period_parameters, Admin2View)
+
query = apply_pagination(query, pagination_parameters)
logger.debug(f'Executing SQL query: {query}')
diff --git a/hdx_hapi/db/dao/age_range_view_dao.py b/hdx_hapi/db/dao/age_range_view_dao.py
deleted file mode 100644
index c4574734..00000000
--- a/hdx_hapi/db/dao/age_range_view_dao.py
+++ /dev/null
@@ -1,32 +0,0 @@
-import logging
-
-from sqlalchemy.ext.asyncio import AsyncSession
-from sqlalchemy import select
-
-from hdx_hapi.db.models.views.db_age_range_view import AgeRangeView
-from hdx_hapi.db.dao.util.util import apply_pagination
-from hdx_hapi.endpoints.util.util import PaginationParams
-
-logger = logging.getLogger(__name__)
-
-
-async def age_ranges_view_list(
- pagination_parameters: PaginationParams,
- db: AsyncSession,
- code: str = None,
-):
- logger.info(f'age_ranges_view_list called with params: code={code}')
-
- query = select(AgeRangeView)
- if code:
- query = query.where(AgeRangeView.code == code)
- query = apply_pagination(query, pagination_parameters)
-
- logger.debug(f'Executing SQL query: {query}')
-
- result = await db.execute(query)
- age_ranges = result.scalars().all()
-
- logger.info(f'Retrieved {len(age_ranges)} rows from the database')
-
- return age_ranges
diff --git a/hdx_hapi/db/dao/conflict_event_view_dao.py b/hdx_hapi/db/dao/conflict_event_view_dao.py
new file mode 100644
index 00000000..a6252deb
--- /dev/null
+++ b/hdx_hapi/db/dao/conflict_event_view_dao.py
@@ -0,0 +1,68 @@
+import logging
+from typing import Optional, Sequence
+
+from hapi_schema.utils.enums import EventType
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy import select
+
+from hdx_hapi.db.models.views.all_views import ConflictEventView
+from hdx_hapi.db.dao.util.util import (
+ apply_location_admin_filter,
+ apply_pagination,
+ apply_reference_period_filter,
+)
+from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters
+
+
+logger = logging.getLogger(__name__)
+
+
+async def conflict_event_view_list(
+ pagination_parameters: PaginationParams,
+ ref_period_parameters: ReferencePeriodParameters,
+ db: AsyncSession,
+ event_type: Optional[EventType] = None,
+ location_ref: Optional[int] = None,
+ location_code: Optional[str] = None,
+ location_name: Optional[str] = None,
+ admin1_ref: Optional[int] = None,
+ admin1_code: Optional[str] = None,
+ admin1_name: Optional[str] = None,
+ admin1_is_unspecified: Optional[bool] = None,
+ admin2_ref: Optional[int] = None,
+ admin2_code: Optional[str] = None,
+ admin2_name: Optional[str] = None,
+ admin2_is_unspecified: Optional[bool] = None,
+) -> Sequence[ConflictEventView]:
+ query = select(ConflictEventView)
+ if event_type:
+ query = query.where(ConflictEventView.event_type == event_type)
+
+ query = apply_location_admin_filter(
+ query,
+ ConflictEventView,
+ location_ref,
+ location_code,
+ location_name,
+ admin1_ref,
+ admin1_code,
+ admin1_name,
+ admin1_is_unspecified,
+ admin2_ref,
+ admin2_code,
+ admin2_name,
+ admin2_is_unspecified,
+ )
+
+ query = apply_reference_period_filter(query, ref_period_parameters, ConflictEventView)
+
+ query = apply_pagination(query, pagination_parameters)
+
+ logger.debug(f'Executing SQL query: {query}')
+
+ result = await db.execute(query)
+ conflict_events = result.scalars().all()
+
+ logger.info(f'Retrieved {len(conflict_events)} rows from the database')
+
+ return conflict_events
diff --git a/hdx_hapi/db/dao/dataset_view_dao.py b/hdx_hapi/db/dao/dataset_view_dao.py
index d7115ca8..d83d50d6 100644
--- a/hdx_hapi/db/dao/dataset_view_dao.py
+++ b/hdx_hapi/db/dao/dataset_view_dao.py
@@ -3,7 +3,7 @@
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
-from hdx_hapi.db.models.views.db_dataset_view import DatasetView
+from hdx_hapi.db.models.views.all_views import DatasetView
from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter
from hdx_hapi.endpoints.util.util import PaginationParams
diff --git a/hdx_hapi/db/dao/food_security_view_dao.py b/hdx_hapi/db/dao/food_security_view_dao.py
index f1198fc1..14c319ca 100644
--- a/hdx_hapi/db/dao/food_security_view_dao.py
+++ b/hdx_hapi/db/dao/food_security_view_dao.py
@@ -3,7 +3,7 @@
from sqlalchemy import select
from hdx_hapi.db.models.views.db_food_security_view import FoodSecurityView
-from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter
+from hdx_hapi.db.dao.util.util import apply_location_admin_filter, apply_pagination, case_insensitive_filter
from hdx_hapi.endpoints.util.util import PaginationParams
@@ -29,6 +29,7 @@ async def food_security_view_list(
admin2_name: str = None,
admin2_is_unspecified: bool = None,
admin1_ref: int = None,
+ admin2_ref: int = None,
):
query = select(FoodSecurityView)
@@ -50,26 +51,22 @@ async def food_security_view_list(
query = query.where(FoodSecurityView.hapi_replaced_date >= hapi_replaced_date_min)
if hapi_replaced_date_max:
query = query.where(FoodSecurityView.hapi_replaced_date < hapi_replaced_date_max)
- if location_code:
- query = case_insensitive_filter(query, FoodSecurityView.location_code, location_code)
- if location_name:
- query = query.where(FoodSecurityView.location_name.icontains(location_name))
- if admin1_name:
- query = query.where(FoodSecurityView.admin1_name.icontains(admin1_name))
- if admin1_code:
- query = case_insensitive_filter(query, FoodSecurityView.admin1_code, admin1_code)
- if admin1_is_unspecified is not None:
- query = query.where(FoodSecurityView.admin1_is_unspecified == admin1_is_unspecified)
- if location_ref:
- query = query.where(FoodSecurityView.location_ref == location_ref)
- if admin2_code:
- query = case_insensitive_filter(query, FoodSecurityView.admin2_code, admin2_code)
- if admin2_name:
- query = query.where(FoodSecurityView.admin2_name.icontains(admin2_name))
- if admin2_is_unspecified is not None:
- query = query.where(FoodSecurityView.admin2_is_unspecified == admin2_is_unspecified)
- if admin1_ref:
- query = query.where(FoodSecurityView.admin1_ref == admin1_ref)
+
+ query = apply_location_admin_filter(
+ query,
+ FoodSecurityView,
+ location_ref,
+ location_code,
+ location_name,
+ admin1_ref,
+ admin1_code,
+ admin1_name,
+ admin1_is_unspecified,
+ admin2_ref,
+ admin2_code,
+ admin2_name,
+ admin2_is_unspecified,
+ )
query = apply_pagination(query, pagination_parameters)
diff --git a/hdx_hapi/db/dao/funding_view_dao.py b/hdx_hapi/db/dao/funding_view_dao.py
new file mode 100644
index 00000000..c0805614
--- /dev/null
+++ b/hdx_hapi/db/dao/funding_view_dao.py
@@ -0,0 +1,57 @@
+import logging
+from typing import Optional, Sequence
+
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy import select
+
+from hdx_hapi.db.models.views.all_views import FundingView
+from hdx_hapi.db.dao.util.util import apply_pagination, apply_reference_period_filter, case_insensitive_filter
+from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters
+
+
+logger = logging.getLogger(__name__)
+
+
+async def funding_view_list(
+ pagination_parameters: PaginationParams,
+ ref_period_parameters: ReferencePeriodParameters,
+ db: AsyncSession,
+ appeal_code: Optional[str] = None,
+ appeal_type: Optional[str] = None,
+ org_acronym: Optional[str] = None,
+ org_name: Optional[str] = None,
+ sector_name: Optional[str] = None,
+ # location_ref: Optional[int] = None,
+ location_code: Optional[str] = None,
+ location_name: Optional[str] = None,
+) -> Sequence[FundingView]:
+ query = select(FundingView)
+ if org_acronym:
+ query = case_insensitive_filter(query, FundingView.org_acronym, org_acronym)
+ if org_name:
+ query = query.where(FundingView.org_name.icontains(org_name))
+ if sector_name:
+ query = query.where(FundingView.sector_name.icontains(sector_name))
+ # if location_ref:
+ # query = query.where(FundingView.location_ref == location_ref)
+ if location_code:
+ query = case_insensitive_filter(query, FundingView.location_code, location_code)
+ if location_name:
+ query = query.where(FundingView.location_name.icontains(location_name))
+ if appeal_code:
+ query = case_insensitive_filter(query, FundingView.appeal_code, appeal_code)
+ if appeal_type:
+ query = case_insensitive_filter(query, FundingView.appeal_type, appeal_type)
+
+ query = apply_reference_period_filter(query, ref_period_parameters, FundingView)
+
+ query = apply_pagination(query, pagination_parameters)
+
+ logger.debug(f'Executing SQL query: {query}')
+
+ result = await db.execute(query)
+ funding = result.scalars().all()
+
+ logger.info(f'Retrieved {len(funding)} rows from the database')
+
+ return funding
diff --git a/hdx_hapi/db/dao/gender_view_dao.py b/hdx_hapi/db/dao/gender_view_dao.py
deleted file mode 100644
index 8c18a879..00000000
--- a/hdx_hapi/db/dao/gender_view_dao.py
+++ /dev/null
@@ -1,36 +0,0 @@
-import logging
-
-from sqlalchemy.ext.asyncio import AsyncSession
-from sqlalchemy import select
-
-from hdx_hapi.db.models.views.db_gender_view import GenderView
-from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter
-from hdx_hapi.endpoints.util.util import PaginationParams
-
-logger = logging.getLogger(__name__)
-
-
-async def genders_view_list(
- pagination_parameters: PaginationParams,
- db: AsyncSession,
- code: str = None,
- description: str = None,
-):
- logger.info(f'genders_view_list called with params: code={code}, description={description}')
-
- query = select(GenderView)
- if code:
- query = case_insensitive_filter(query, GenderView.code, code)
- if description:
- query = query.where(GenderView.description.icontains(description))
-
- query = apply_pagination(query, pagination_parameters)
-
- logger.debug(f'Executing SQL query: {query}')
-
- result = await db.execute(query)
- genders = result.scalars().all()
-
- logger.info(f'Retrieved {len(genders)} rows from the database')
-
- return genders
diff --git a/hdx_hapi/db/dao/humanitarian_needs_view_dao.py b/hdx_hapi/db/dao/humanitarian_needs_view_dao.py
index 5a6f52f2..2b123bfd 100644
--- a/hdx_hapi/db/dao/humanitarian_needs_view_dao.py
+++ b/hdx_hapi/db/dao/humanitarian_needs_view_dao.py
@@ -1,96 +1,86 @@
-import datetime
+from typing import Optional
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
-from hdx_hapi.db.models.views.db_humanitarian_needs_view import HumanitarianNeedsView
-from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter
-from hdx_hapi.endpoints.util.util import PaginationParams
+from hdx_hapi.db.models.views.all_views import HumanitarianNeedsView
+from hdx_hapi.db.dao.util.util import (
+ apply_location_admin_filter,
+ apply_pagination,
+ apply_reference_period_filter,
+)
+from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters
+from hapi_schema.utils.enums import DisabledMarker, Gender, PopulationGroup, PopulationStatus
async def humanitarian_needs_view_list(
pagination_parameters: PaginationParams,
+ ref_period_parameters: ReferencePeriodParameters,
db: AsyncSession,
- gender_code: str = None,
- age_range_code: str = None,
- disabled_marker: bool = None,
- sector_code: str = None,
- sector_name: str = None,
- population_group_code: str = None,
- population_status_code: str = None,
- population: int = None,
- dataset_hdx_provider_stub: str = None,
- resource_update_date_min: datetime = None,
- resource_update_date_max: datetime = None,
- hapi_updated_date_min: datetime = None,
- hapi_updated_date_max: datetime = None,
- hapi_replaced_date_min: datetime = None,
- hapi_replaced_date_max: datetime = None,
- location_code: str = None,
- location_name: str = None,
- admin1_code: str = None,
- # admin1_name: str = None,
- admin1_is_unspecified: bool = None,
- location_ref: int = None,
- admin2_code: str = None,
- admin2_name: str = None,
- admin2_is_unspecified: bool = None,
- admin1_ref: int = None,
+ admin2_ref: Optional[int] = None,
+ gender: Optional[Gender] = None,
+ age_range: Optional[str] = None,
+    min_age: Optional[int] = None,  # NOTE(review): accepted but unused — filter is commented out below
+    max_age: Optional[int] = None,  # NOTE(review): accepted but unused — filter is commented out below
+ disabled_marker: Optional[DisabledMarker] = None,
+ sector_code: Optional[str] = None,
+ population_group: Optional[PopulationGroup] = None,
+ population_status: Optional[PopulationStatus] = None,
+ population: Optional[int] = None,
+ sector_name: Optional[str] = None,
+ location_code: Optional[str] = None,
+ location_name: Optional[str] = None,
+ location_ref: Optional[int] = None,
+ admin1_code: Optional[str] = None,
+ admin2_code: Optional[str] = None,
+ admin2_name: Optional[str] = None,
+ admin1_ref: Optional[int] = None,
+ admin1_name: Optional[str] = None,
+ admin1_is_unspecified: Optional[bool] = None,
+ admin2_is_unspecified: Optional[bool] = None,
):
query = select(HumanitarianNeedsView)
- if gender_code:
- query = case_insensitive_filter(query, HumanitarianNeedsView.gender_code, gender_code)
- if age_range_code:
- query = query.where(HumanitarianNeedsView.age_range_code == age_range_code)
+ if gender:
+ query = query.where(HumanitarianNeedsView.gender == gender)
+ if age_range:
+ query = query.where(HumanitarianNeedsView.age_range == age_range)
+ # if min_age:
+ # query = query.where(HumanitarianNeedsView.min_age == min_age)
+ # if max_age:
+ # query = query.where(HumanitarianNeedsView.max_age == max_age)
if disabled_marker:
query = query.where(HumanitarianNeedsView.disabled_marker == disabled_marker)
if sector_code:
query = query.where(HumanitarianNeedsView.sector_code.icontains(sector_code))
- if sector_name:
- query = query.where(HumanitarianNeedsView.sector_name.icontains(sector_name))
- if population_group_code:
- query = query.where(HumanitarianNeedsView.population_group_code.icontains(population_group_code))
- if population_status_code:
- query = query.where(HumanitarianNeedsView.population_status_code.icontains(population_status_code))
+ if population_group:
+ query = query.where(HumanitarianNeedsView.population_group == population_group)
+ if population_status:
+ query = query.where(HumanitarianNeedsView.population_status == population_status)
+
if population:
query = query.where(HumanitarianNeedsView.population == population)
- if dataset_hdx_provider_stub:
- query = case_insensitive_filter(
- query, HumanitarianNeedsView.dataset_hdx_provider_stub, dataset_hdx_provider_stub
- )
- if resource_update_date_min:
- query = query.where(HumanitarianNeedsView.resource_update_date >= resource_update_date_min)
- if resource_update_date_max:
- query = query.where(HumanitarianNeedsView.resource_update_date < resource_update_date_max)
- if hapi_updated_date_min:
- query = query.where(HumanitarianNeedsView.hapi_updated_date >= hapi_updated_date_min)
- if hapi_updated_date_max:
- query = query.where(HumanitarianNeedsView.hapi_updated_date < hapi_updated_date_max)
- if hapi_replaced_date_min:
- query = query.where(HumanitarianNeedsView.hapi_replaced_date >= hapi_replaced_date_min)
- if hapi_replaced_date_max:
- query = query.where(HumanitarianNeedsView.hapi_replaced_date < hapi_replaced_date_max)
- if location_code:
- query = case_insensitive_filter(query, HumanitarianNeedsView.location_code, location_code)
- if location_name:
- query = query.where(HumanitarianNeedsView.location_name.icontains(location_name))
- if admin1_code:
- query = case_insensitive_filter(query, HumanitarianNeedsView.admin1_code, admin1_code)
- # if admin1_name:
- # query = query.where(HumanitarianNeedsView.admin1_name.icontains(admin1_name))
- if admin1_is_unspecified is not None:
- query = query.where(HumanitarianNeedsView.admin1_is_unspecified == admin1_is_unspecified)
- if location_ref:
- query = query.where(HumanitarianNeedsView.location_ref == location_ref)
- if admin2_code:
- query = case_insensitive_filter(query, HumanitarianNeedsView.admin2_code, admin2_code)
- if admin2_name:
- query = query.where(HumanitarianNeedsView.admin2_name.icontains(admin2_name))
- if admin2_is_unspecified is not None:
- query = query.where(HumanitarianNeedsView.admin2_is_unspecified == admin2_is_unspecified)
- if admin1_ref:
- query = query.where(HumanitarianNeedsView.admin1_ref == admin1_ref)
+
+ if sector_name:
+ query = query.where(HumanitarianNeedsView.sector_name.icontains(sector_name))
+
+ query = apply_location_admin_filter(
+ query,
+ HumanitarianNeedsView,
+ location_ref,
+ location_code,
+ location_name,
+ admin1_ref,
+ admin1_code,
+ admin1_name,
+ admin1_is_unspecified,
+ admin2_ref,
+ admin2_code,
+ admin2_name,
+ admin2_is_unspecified,
+ )
+
+ query = apply_reference_period_filter(query, ref_period_parameters, HumanitarianNeedsView)
query = apply_pagination(query, pagination_parameters)
diff --git a/hdx_hapi/db/dao/location_view_dao.py b/hdx_hapi/db/dao/location_view_dao.py
index d73dfd56..2700b01f 100644
--- a/hdx_hapi/db/dao/location_view_dao.py
+++ b/hdx_hapi/db/dao/location_view_dao.py
@@ -1,25 +1,21 @@
-from datetime import datetime
import logging
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
-from hdx_hapi.db.models.views.db_location_view import LocationView
-from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter
-from hdx_hapi.endpoints.util.util import PaginationParams
+from hdx_hapi.db.models.views.all_views import LocationView
+from hdx_hapi.db.dao.util.util import apply_pagination, apply_reference_period_filter, case_insensitive_filter
+from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters
logger = logging.getLogger(__name__)
async def locations_view_list(
pagination_parameters: PaginationParams,
+ ref_period_parameters: ReferencePeriodParameters,
db: AsyncSession,
code: str = None,
name: str = None,
- hapi_updated_date_min: datetime = None,
- hapi_updated_date_max: datetime = None,
- hapi_replaced_date_min: datetime = None,
- hapi_replaced_date_max: datetime = None,
):
logger.info(f'orgs_view_list called with params: code={code}, name={name}')
@@ -28,14 +24,8 @@ async def locations_view_list(
query = case_insensitive_filter(query, LocationView.code, code)
if name:
query = query.where(LocationView.name.icontains(name))
- if hapi_updated_date_min:
- query = query.where(LocationView.hapi_updated_date >= hapi_updated_date_min)
- if hapi_updated_date_max:
- query = query.where(LocationView.hapi_updated_date < hapi_updated_date_max)
- if hapi_replaced_date_min:
- query = query.where(LocationView.hapi_replaced_date >= hapi_replaced_date_min)
- if hapi_replaced_date_max:
- query = query.where(LocationView.hapi_replaced_date < hapi_replaced_date_max)
+
+ query = apply_reference_period_filter(query, ref_period_parameters, LocationView)
query = apply_pagination(query, pagination_parameters)
diff --git a/hdx_hapi/db/dao/national_risk_view_dao.py b/hdx_hapi/db/dao/national_risk_view_dao.py
index 4a91b05d..4a981935 100644
--- a/hdx_hapi/db/dao/national_risk_view_dao.py
+++ b/hdx_hapi/db/dao/national_risk_view_dao.py
@@ -1,61 +1,56 @@
-import datetime
+from typing import Optional
+from hapi_schema.utils.enums import RiskClass
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
-from hdx_hapi.db.models.views.db_national_risk_view import NationalRiskView
-from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter
-from hdx_hapi.endpoints.util.util import PaginationParams
-
+from hdx_hapi.db.models.views.all_views import NationalRiskView
+from hdx_hapi.db.dao.util.util import apply_pagination, apply_reference_period_filter, case_insensitive_filter
+from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters
async def national_risks_view_list(
pagination_parameters: PaginationParams,
+ ref_period_parameters: ReferencePeriodParameters,
db: AsyncSession,
- risk_class: int = None,
- global_rank: int = None,
- overall_risk: float = None,
- hazard_exposure_risk: float = None,
- vulnerability_risk: float = None,
- coping_capacity_risk: float = None,
- dataset_hdx_provider_stub: str = None,
- resource_update_date_min: datetime = None,
- resource_update_date_max: datetime = None,
- hapi_updated_date_min: datetime = None,
- hapi_updated_date_max: datetime = None,
- hapi_replaced_date_min: datetime = None,
- hapi_replaced_date_max: datetime = None,
- # sector_name: str = None,
- location_code: str = None,
- location_name: str = None,
+ risk_class: Optional[RiskClass] = None,
+ global_rank_min: Optional[int] = None,
+ global_rank_max: Optional[int] = None,
+ overall_risk_min: Optional[float] = None,
+ overall_risk_max: Optional[float] = None,
+ hazard_exposure_risk_min: Optional[float] = None,
+ hazard_exposure_risk_max: Optional[float] = None,
+ vulnerability_risk_min: Optional[float] = None,
+ vulnerability_risk_max: Optional[float] = None,
+ coping_capacity_risk_min: Optional[float] = None,
+ coping_capacity_risk_max: Optional[float] = None,
+ location_code: Optional[str] = None,
+ location_name: Optional[str] = None,
):
query = select(NationalRiskView)
if risk_class:
query = query.where(NationalRiskView.risk_class == risk_class)
- if global_rank:
- query = query.where(NationalRiskView.global_rank == global_rank)
- if overall_risk:
- query = query.where(NationalRiskView.overall_risk == overall_risk)
- if hazard_exposure_risk:
- query = query.where(NationalRiskView.hazard_exposure_risk == hazard_exposure_risk)
- if vulnerability_risk:
- query = query.where(NationalRiskView.vulnerability_risk == vulnerability_risk)
- if coping_capacity_risk:
- query = query.where(NationalRiskView.coping_capacity_risk == coping_capacity_risk)
- if dataset_hdx_provider_stub:
- query = case_insensitive_filter(query, NationalRiskView.dataset_hdx_provider_stub, dataset_hdx_provider_stub)
- if resource_update_date_min:
- query = query.where(NationalRiskView.resource_update_date >= resource_update_date_min)
- if resource_update_date_max:
- query = query.where(NationalRiskView.resource_update_date < resource_update_date_max)
- if hapi_updated_date_min:
- query = query.where(NationalRiskView.hapi_updated_date >= hapi_updated_date_min)
- if hapi_updated_date_max:
- query = query.where(NationalRiskView.hapi_updated_date < hapi_updated_date_max)
- if hapi_replaced_date_min:
- query = query.where(NationalRiskView.hapi_replaced_date >= hapi_replaced_date_min)
- if hapi_replaced_date_max:
- query = query.where(NationalRiskView.hapi_replaced_date < hapi_replaced_date_max)
+ if global_rank_min:
+ query = query.where(NationalRiskView.global_rank >= global_rank_min)
+ if global_rank_max:
+ query = query.where(NationalRiskView.global_rank < global_rank_max)
+ if overall_risk_min:
+ query = query.where(NationalRiskView.overall_risk >= overall_risk_min)
+ if overall_risk_max:
+ query = query.where(NationalRiskView.overall_risk < overall_risk_max)
+ if hazard_exposure_risk_min:
+ query = query.where(NationalRiskView.hazard_exposure_risk >= hazard_exposure_risk_min)
+ if hazard_exposure_risk_max:
+ query = query.where(NationalRiskView.hazard_exposure_risk < hazard_exposure_risk_max)
+ if vulnerability_risk_min:
+ query = query.where(NationalRiskView.vulnerability_risk >= vulnerability_risk_min)
+ if vulnerability_risk_max:
+ query = query.where(NationalRiskView.vulnerability_risk < vulnerability_risk_max)
+ if coping_capacity_risk_min:
+ query = query.where(NationalRiskView.coping_capacity_risk >= coping_capacity_risk_min)
+ if coping_capacity_risk_max:
+ query = query.where(NationalRiskView.coping_capacity_risk < coping_capacity_risk_max)
+
# if sector_name:
# query = query.where(NationalRiskView.sector_name.icontains(sector_name))
if location_code:
@@ -63,6 +58,8 @@ async def national_risks_view_list(
if location_name:
query = query.where(NationalRiskView.location_name.icontains(location_name))
+ query = apply_reference_period_filter(query, ref_period_parameters, NationalRiskView)
+
query = apply_pagination(query, pagination_parameters)
result = await db.execute(query)
diff --git a/hdx_hapi/db/dao/operational_presence_view_dao.py b/hdx_hapi/db/dao/operational_presence_view_dao.py
index f849fa2a..578ae163 100644
--- a/hdx_hapi/db/dao/operational_presence_view_dao.py
+++ b/hdx_hapi/db/dao/operational_presence_view_dao.py
@@ -1,12 +1,17 @@
import logging
-from datetime import datetime
+from typing import Optional, Sequence
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
-from hdx_hapi.db.models.views.db_operational_presence_view import OperationalPresenceView
-from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter
-from hdx_hapi.endpoints.util.util import PaginationParams
+from hdx_hapi.db.models.views.all_views import OperationalPresenceView
+from hdx_hapi.db.dao.util.util import (
+ apply_location_admin_filter,
+ apply_pagination,
+ apply_reference_period_filter,
+ case_insensitive_filter,
+)
+from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters
logger = logging.getLogger(__name__)
@@ -14,83 +19,83 @@
async def operational_presences_view_list(
pagination_parameters: PaginationParams,
+ ref_period_parameters: ReferencePeriodParameters,
db: AsyncSession,
- sector_code: str = None,
- dataset_hdx_provider_stub: str = None,
- resource_update_date_min: datetime = None,
- resource_update_date_max: datetime = None,
- hapi_updated_date_min: datetime = None,
- hapi_updated_date_max: datetime = None,
- hapi_replaced_date_min: datetime = None,
- hapi_replaced_date_max: datetime = None,
- org_acronym: str = None,
- org_name: str = None,
- sector_name: str = None,
- location_code: str = None,
- location_name: str = None,
- admin1_code: str = None,
- admin1_name: str = None,
- admin1_is_unspecified: bool = None,
- location_ref: int = None,
- admin2_code: str = None,
- admin2_name: str = None,
- admin2_is_unspecified: bool = None,
- admin1_ref: int = None,
-):
+ sector_code: Optional[str] = None,
+ # dataset_hdx_provider_stub: str = None,
+ # resource_update_date_min: datetime = None,
+ # resource_update_date_max: datetime = None,
+ # hapi_updated_date_min: datetime = None,
+ # hapi_updated_date_max: datetime = None,
+ # hapi_replaced_date_min: datetime = None,
+ # hapi_replaced_date_max: datetime = None,
+ org_acronym: Optional[str] = None,
+ org_name: Optional[str] = None,
+ sector_name: Optional[str] = None,
+ location_code: Optional[str] = None,
+ location_name: Optional[str] = None,
+ admin1_ref: Optional[int] = None,
+ admin1_code: Optional[str] = None,
+ admin1_name: Optional[str] = None,
+ admin1_is_unspecified: Optional[bool] = None,
+ location_ref: Optional[int] = None,
+ admin2_ref: Optional[int] = None,
+ admin2_code: Optional[str] = None,
+ admin2_name: Optional[str] = None,
+ admin2_is_unspecified: Optional[bool] = None,
+) -> Sequence[OperationalPresenceView]:
logger.info(
f'operational_presences_view_list called with params: sector_code={sector_code}, '
- f'dataset_hdx_provider_stub={dataset_hdx_provider_stub}, resource_update_date_min={resource_update_date_min}, '
- f'resource_update_date_max={resource_update_date_max}, org_acronym={org_acronym}, org_name={org_name}, '
+ f'org_acronym={org_acronym}, org_name={org_name}, '
f'sector_name={sector_name}, location_code={location_code}, location_name={location_name}, '
f'admin1_code={admin1_code}, admin1_name={admin1_name}, admin1_is_unspecified={admin1_is_unspecified}, '
- f'admin2_code={admin2_code}, admin2_name={admin2_name}, admin2_is_unspecified={admin2_is_unspecified}'
+ f'admin2_code={admin2_code}, admin2_name={admin2_name}, admin2_is_unspecified={admin2_is_unspecified}, '
+ f'ref_period_parameters={ref_period_parameters}'
)
query = select(OperationalPresenceView)
- if sector_code:
- query = query.where(OperationalPresenceView.sector_code.icontains(sector_code))
- if dataset_hdx_provider_stub:
- query = case_insensitive_filter(
- query, OperationalPresenceView.dataset_hdx_provider_stub, dataset_hdx_provider_stub
- )
- if resource_update_date_min:
- query = query.where(OperationalPresenceView.resource_update_date >= resource_update_date_min)
- if resource_update_date_max:
- query = query.where(OperationalPresenceView.resource_update_date < resource_update_date_max)
- if hapi_updated_date_min:
- query = query.where(OperationalPresenceView.hapi_updated_date >= hapi_updated_date_min)
- if hapi_updated_date_max:
- query = query.where(OperationalPresenceView.hapi_updated_date < hapi_updated_date_max)
- if hapi_replaced_date_min:
- query = query.where(OperationalPresenceView.hapi_replaced_date >= hapi_replaced_date_min)
- if hapi_replaced_date_max:
- query = query.where(OperationalPresenceView.hapi_replaced_date < hapi_replaced_date_max)
+ # if dataset_hdx_provider_stub:
+ # query = case_insensitive_filter(
+ # query, OperationalPresenceView.dataset_hdx_provider_stub, dataset_hdx_provider_stub
+ # )
+ # if resource_update_date_min:
+ # query = query.where(OperationalPresenceView.resource_update_date >= resource_update_date_min)
+ # if resource_update_date_max:
+ # query = query.where(OperationalPresenceView.resource_update_date < resource_update_date_max)
+ # if hapi_updated_date_min:
+ # query = query.where(OperationalPresenceView.hapi_updated_date >= hapi_updated_date_min)
+ # if hapi_updated_date_max:
+ # query = query.where(OperationalPresenceView.hapi_updated_date < hapi_updated_date_max)
+ # if hapi_replaced_date_min:
+ # query = query.where(OperationalPresenceView.hapi_replaced_date >= hapi_replaced_date_min)
+ # if hapi_replaced_date_max:
+ # query = query.where(OperationalPresenceView.hapi_replaced_date < hapi_replaced_date_max)
if org_acronym:
query = case_insensitive_filter(query, OperationalPresenceView.org_acronym, org_acronym)
if org_name:
query = query.where(OperationalPresenceView.org_name.icontains(org_name))
+ if sector_code:
+ query = query.where(OperationalPresenceView.sector_code.icontains(sector_code))
if sector_name:
query = query.where(OperationalPresenceView.sector_name.icontains(sector_name))
- if location_code:
- query = case_insensitive_filter(query, OperationalPresenceView.location_code, location_code)
- if location_name:
- query = query.where(OperationalPresenceView.location_name.icontains(location_name))
- if admin1_code:
- query = case_insensitive_filter(query, OperationalPresenceView.admin1_code, admin1_code)
- if admin1_name:
- query = query.where(OperationalPresenceView.admin1_name.icontains(admin1_name))
- if location_ref:
- query = query.where(OperationalPresenceView.location_ref == location_ref)
- if admin2_code:
- query = case_insensitive_filter(query, OperationalPresenceView.admin2_code, admin2_code)
- if admin2_name:
- query = query.where(OperationalPresenceView.admin2_name.icontains(admin2_name))
- if admin1_is_unspecified is not None:
- query = query.where(OperationalPresenceView.admin1_is_unspecified == admin1_is_unspecified)
- if admin2_is_unspecified is not None:
- query = query.where(OperationalPresenceView.admin2_is_unspecified == admin2_is_unspecified)
- if admin1_ref:
- query = query.where(OperationalPresenceView.admin1_ref == admin1_ref)
+
+ query = apply_location_admin_filter(
+ query,
+ OperationalPresenceView,
+ location_ref,
+ location_code,
+ location_name,
+ admin1_ref,
+ admin1_code,
+ admin1_name,
+ admin1_is_unspecified,
+ admin2_ref,
+ admin2_code,
+ admin2_name,
+ admin2_is_unspecified,
+ )
+
+ query = apply_reference_period_filter(query, ref_period_parameters, OperationalPresenceView)
query = apply_pagination(query, pagination_parameters)
diff --git a/hdx_hapi/db/dao/org_type_view_dao.py b/hdx_hapi/db/dao/org_type_view_dao.py
index 3413a207..73528117 100644
--- a/hdx_hapi/db/dao/org_type_view_dao.py
+++ b/hdx_hapi/db/dao/org_type_view_dao.py
@@ -3,7 +3,7 @@
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
-from hdx_hapi.db.models.views.db_org_type_view import OrgTypeView
+from hdx_hapi.db.models.views.all_views import OrgTypeView
from hdx_hapi.db.dao.util.util import apply_pagination
from hdx_hapi.endpoints.util.util import PaginationParams
diff --git a/hdx_hapi/db/dao/org_view_dao.py b/hdx_hapi/db/dao/org_view_dao.py
index f4c5f086..0147c94e 100644
--- a/hdx_hapi/db/dao/org_view_dao.py
+++ b/hdx_hapi/db/dao/org_view_dao.py
@@ -3,7 +3,7 @@
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
-from hdx_hapi.db.models.views.db_org_view import OrgView
+from hdx_hapi.db.models.views.all_views import OrgView
from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter
from hdx_hapi.endpoints.util.util import PaginationParams
diff --git a/hdx_hapi/db/dao/population_group_view_dao.py b/hdx_hapi/db/dao/population_group_view_dao.py
deleted file mode 100644
index 8d32db12..00000000
--- a/hdx_hapi/db/dao/population_group_view_dao.py
+++ /dev/null
@@ -1,36 +0,0 @@
-import logging
-
-from sqlalchemy.ext.asyncio import AsyncSession
-from sqlalchemy import select
-
-from hdx_hapi.db.models.views.db_population_group_view import PopulationGroupView
-from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter
-from hdx_hapi.endpoints.util.util import PaginationParams
-
-logger = logging.getLogger(__name__)
-
-
-async def population_groups_view_list(
- pagination_parameters: PaginationParams,
- db: AsyncSession,
- code: str = None,
- description: str = None,
-):
- logger.info(f'population_groups_view_list called with params: code={code}, description={description}')
-
- query = select(PopulationGroupView)
- if code:
- query = case_insensitive_filter(query, PopulationGroupView.code, code)
- if description:
- query = query.where(PopulationGroupView.description.icontains(description))
-
- query = apply_pagination(query, pagination_parameters)
-
- logger.debug(f'Executing SQL query: {query}')
-
- result = await db.execute(query)
- population_groups = result.scalars().all()
-
- logger.info(f'Retrieved {len(population_groups)} rows from the database')
-
- return population_groups
diff --git a/hdx_hapi/db/dao/population_status_view_dao.py b/hdx_hapi/db/dao/population_status_view_dao.py
deleted file mode 100644
index 652fd9d8..00000000
--- a/hdx_hapi/db/dao/population_status_view_dao.py
+++ /dev/null
@@ -1,36 +0,0 @@
-import logging
-
-from sqlalchemy.ext.asyncio import AsyncSession
-from sqlalchemy import select
-
-from hdx_hapi.db.models.views.db_population_status_view import PopulationStatusView
-from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter
-from hdx_hapi.endpoints.util.util import PaginationParams
-
-logger = logging.getLogger(__name__)
-
-
-async def population_statuses_view_list(
- pagination_parameters: PaginationParams,
- db: AsyncSession,
- code: str = None,
- description: str = None,
-):
- logger.info(f'population_statuses_view_list called with params: code={code}, description={description}')
-
- query = select(PopulationStatusView)
- if code:
- query = case_insensitive_filter(query, PopulationStatusView.code, code)
- if description:
- query = query.where(PopulationStatusView.description.icontains(description))
-
- query = apply_pagination(query, pagination_parameters)
-
- logger.debug(f'Executing SQL query: {query}')
-
- result = await db.execute(query)
- population_statuses = result.scalars().all()
-
- logger.info(f'Retrieved {len(population_statuses)} rows from the database')
-
- return population_statuses
diff --git a/hdx_hapi/db/dao/population_view_dao.py b/hdx_hapi/db/dao/population_view_dao.py
index b2ba7a42..469fecf0 100644
--- a/hdx_hapi/db/dao/population_view_dao.py
+++ b/hdx_hapi/db/dao/population_view_dao.py
@@ -1,12 +1,19 @@
import logging
-import datetime
+from typing import Optional, Sequence
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
-from hdx_hapi.db.models.views.db_population_view import PopulationView
-from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter
-from hdx_hapi.endpoints.util.util import PaginationParams
+from hapi_schema.utils.enums import Gender
+
+from hdx_hapi.db.models.views.all_views import PopulationView
+from hdx_hapi.db.dao.util.util import (
+ apply_location_admin_filter,
+ apply_pagination,
+ apply_reference_period_filter,
+ case_insensitive_filter,
+)
+from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters
logger = logging.getLogger(__name__)
@@ -14,82 +21,66 @@
async def populations_view_list(
pagination_parameters: PaginationParams,
+ ref_period_parameters: ReferencePeriodParameters,
db: AsyncSession,
- gender_code: str = None,
- age_range_code: str = None,
- population: int = None,
- dataset_hdx_provider_stub: str = None,
- resource_update_date_min: datetime = None,
- resource_update_date_max: datetime = None,
- hapi_updated_date_min: datetime = None,
- hapi_updated_date_max: datetime = None,
- hapi_replaced_date_min: datetime = None,
- hapi_replaced_date_max: datetime = None,
- location_code: str = None,
- location_name: str = None,
- admin1_name: str = None,
- admin1_code: str = None,
- admin1_is_unspecified: bool = None,
- location_ref: int = None,
- admin2_code: str = None,
- admin2_name: str = None,
- admin2_is_unspecified: bool = None,
- admin1_ref: int = None,
-):
+ gender: Optional[Gender] = None,
+ age_range: Optional[str] = None,
+ min_age: Optional[int] = None,
+ max_age: Optional[int] = None,
+ population: Optional[int] = None,
+ location_code: Optional[str] = None,
+ location_name: Optional[str] = None,
+ admin1_ref: Optional[int] = None,
+ admin1_code: Optional[str] = None,
+ admin1_name: Optional[str] = None,
+ admin1_is_unspecified: Optional[bool] = None,
+ location_ref: Optional[int] = None,
+ admin2_ref: Optional[int] = None,
+ admin2_code: Optional[str] = None,
+ admin2_name: Optional[str] = None,
+ admin2_is_unspecified: Optional[bool] = None,
+) -> Sequence[PopulationView]:
logger.info(
- f'populations_view_list called with params: gender_code={gender_code}, age_range_code={age_range_code}, '
- f'population={population}, dataset_hdx_provider_stub={dataset_hdx_provider_stub}, '
- f'resource_update_date_min={resource_update_date_min}, resource_update_date_max={resource_update_date_max}, '
+ f'populations_view_list called with params: gender={gender}, age_range={age_range}, '
+ f'min_age={min_age}, max_age={max_age}, '
+ f'population={population}, '
f'location_code={location_code}, location_name={location_name}, admin1_name={admin1_name}, '
f'admin1_code={admin1_code}, admin1_is_unspecified={admin1_is_unspecified}, admin2_code={admin2_code}, '
f'admin2_name={admin2_name}, admin2_is_unspecified={admin2_is_unspecified}'
+        f', ref_period_parameters={ref_period_parameters}'
)
query = select(PopulationView)
- if gender_code:
- query = case_insensitive_filter(query, PopulationView.gender_code, gender_code)
- if age_range_code:
- query = query.where(PopulationView.age_range_code == age_range_code)
+ if gender:
+ query = query.where(PopulationView.gender == gender)
+ if age_range:
+ query = case_insensitive_filter(query, PopulationView.age_range, age_range)
if population:
query = query.where(PopulationView.population == population)
- if dataset_hdx_provider_stub:
- query = case_insensitive_filter(query, PopulationView.dataset_hdx_provider_stub, dataset_hdx_provider_stub)
- if resource_update_date_min:
- query = query.where(PopulationView.resource_update_date >= resource_update_date_min)
- if resource_update_date_max:
- query = query.where(PopulationView.resource_update_date < resource_update_date_max)
- if hapi_updated_date_min:
- query = query.where(PopulationView.hapi_updated_date >= hapi_updated_date_min)
- if hapi_updated_date_max:
- query = query.where(PopulationView.hapi_updated_date < hapi_updated_date_max)
- if hapi_replaced_date_min:
- query = query.where(PopulationView.hapi_replaced_date >= hapi_replaced_date_min)
- if hapi_replaced_date_max:
- query = query.where(PopulationView.hapi_replaced_date < hapi_replaced_date_max)
- if location_code:
- query = case_insensitive_filter(query, PopulationView.location_code, location_code)
- if location_name:
- query = query.where(PopulationView.location_name.icontains(location_name))
- if admin1_name:
- query = query.where(PopulationView.admin1_name.icontains(admin1_name))
- if admin1_code:
- query = case_insensitive_filter(query, PopulationView.admin1_code, admin1_code)
- if admin1_is_unspecified is not None:
- query = query.where(PopulationView.admin1_is_unspecified == admin1_is_unspecified)
- if location_ref:
- query = query.where(PopulationView.location_ref == location_ref)
- if admin2_code:
- query = case_insensitive_filter(query, PopulationView.admin2_code, admin2_code)
- if admin2_name:
- query = query.where(PopulationView.admin2_name.icontains(admin2_name))
- if admin2_is_unspecified is not None:
- query = query.where(PopulationView.admin2_is_unspecified == admin2_is_unspecified)
- if admin1_ref:
- query = query.where(PopulationView.admin1_ref == admin1_ref)
+ if min_age:
+ query = query.where(PopulationView.min_age >= min_age)
+ if max_age:
+ query = query.where(PopulationView.max_age <= max_age)
+ query = apply_location_admin_filter(
+ query,
+ PopulationView,
+ location_ref,
+ location_code,
+ location_name,
+ admin1_ref,
+ admin1_code,
+ admin1_name,
+ admin1_is_unspecified,
+ admin2_ref,
+ admin2_code,
+ admin2_name,
+ admin2_is_unspecified,
+ )
- query = apply_pagination(query, pagination_parameters)
+ query = apply_reference_period_filter(query, ref_period_parameters, PopulationView)
- logger.debug(f'Executing SQL query: {query}')
+ query = apply_pagination(query, pagination_parameters)
+ logger.info(f'Executing SQL query: {query}')
result = await db.execute(query)
populations = result.scalars().all()
diff --git a/hdx_hapi/db/dao/refugees_view_dao.py b/hdx_hapi/db/dao/refugees_view_dao.py
new file mode 100644
index 00000000..ab0c4cc7
--- /dev/null
+++ b/hdx_hapi/db/dao/refugees_view_dao.py
@@ -0,0 +1,57 @@
+from typing import Optional
+
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy import select
+
+from hdx_hapi.db.models.views.all_views import RefugeesView
+from hdx_hapi.db.dao.util.util import (
+ apply_pagination,
+ apply_reference_period_filter,
+ case_insensitive_filter,
+)
+from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters
+from hapi_schema.utils.enums import Gender, PopulationGroup
+
+
+async def refugees_view_list(
+ pagination_parameters: PaginationParams,
+ ref_period_parameters: ReferencePeriodParameters,
+ db: AsyncSession,
+ population_group: Optional[PopulationGroup] = None,
+ gender: Optional[Gender] = None,
+ age_range: Optional[str] = None,
+ min_age: Optional[int] = None,
+ max_age: Optional[int] = None,
+ origin_location_code: Optional[str] = None,
+ origin_location_name: Optional[str] = None,
+ asylum_location_code: Optional[str] = None,
+ asylum_location_name: Optional[str] = None,
+):
+ query = select(RefugeesView)
+
+ if gender:
+ query = query.where(RefugeesView.gender == gender)
+ if age_range:
+ query = query.where(RefugeesView.age_range == age_range)
+    # TODO: min_age/max_age are accepted by this function's signature but the
+    # filters below are disabled; when enabling, use range comparisons
+    # (RefugeesView.min_age >= min_age, RefugeesView.max_age <= max_age)
+    # as in populations_view_list, not equality — confirm the view columns first.
+ if population_group:
+ query = query.where(RefugeesView.population_group == population_group)
+ if origin_location_code:
+ query = case_insensitive_filter(query, RefugeesView.origin_location_code, origin_location_code)
+ if origin_location_name:
+ query = query.where(RefugeesView.origin_location_name.icontains(origin_location_name))
+ if asylum_location_code:
+ query = case_insensitive_filter(query, RefugeesView.asylum_location_code, asylum_location_code)
+ if asylum_location_name:
+ query = query.where(RefugeesView.asylum_location_name.icontains(asylum_location_name))
+
+ query = apply_reference_period_filter(query, ref_period_parameters, RefugeesView)
+
+ query = apply_pagination(query, pagination_parameters)
+
+ result = await db.execute(query)
+ refugees = result.scalars().all()
+ return refugees
diff --git a/hdx_hapi/db/dao/resource_view_dao.py b/hdx_hapi/db/dao/resource_view_dao.py
index 4ac16400..e3df4502 100644
--- a/hdx_hapi/db/dao/resource_view_dao.py
+++ b/hdx_hapi/db/dao/resource_view_dao.py
@@ -2,12 +2,11 @@
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
-from hdx_hapi.db.models.views.db_resource_view import ResourceView
+from hdx_hapi.db.models.views.all_views import ResourceView
from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter
from hdx_hapi.endpoints.util.util import PaginationParams
-
async def resources_view_list(
pagination_parameters: PaginationParams,
db: AsyncSession,
@@ -18,8 +17,6 @@ async def resources_view_list(
is_hxl: bool = None,
hapi_updated_date_min: datetime = None,
hapi_updated_date_max: datetime = None,
- hapi_replaced_date_min: datetime = None,
- hapi_replaced_date_max: datetime = None,
dataset_title: str = None,
dataset_hdx_id: str = None,
dataset_hdx_stub: str = None,
@@ -41,10 +38,6 @@ async def resources_view_list(
query = query.where(ResourceView.hapi_updated_date >= hapi_updated_date_min)
if hapi_updated_date_max:
query = query.where(ResourceView.hapi_updated_date < hapi_updated_date_max)
- if hapi_replaced_date_min:
- query = query.where(ResourceView.hapi_replaced_date >= hapi_replaced_date_min)
- if hapi_replaced_date_max:
- query = query.where(ResourceView.hapi_replaced_date < hapi_replaced_date_max)
if dataset_title:
query = query.where(ResourceView.dataset_title == dataset_title)
if dataset_hdx_id:
diff --git a/hdx_hapi/db/dao/sector_view_dao.py b/hdx_hapi/db/dao/sector_view_dao.py
index 9c121728..daa9df06 100644
--- a/hdx_hapi/db/dao/sector_view_dao.py
+++ b/hdx_hapi/db/dao/sector_view_dao.py
@@ -3,7 +3,7 @@
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
-from hdx_hapi.db.models.views.db_sector_view import SectorView
+from hdx_hapi.db.models.views.all_views import SectorView
from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter
from hdx_hapi.endpoints.util.util import PaginationParams
diff --git a/hdx_hapi/db/dao/util/util.py b/hdx_hapi/db/dao/util/util.py
index aa4dcd71..1bf1fd59 100644
--- a/hdx_hapi/db/dao/util/util.py
+++ b/hdx_hapi/db/dao/util/util.py
@@ -1,6 +1,8 @@
-from sqlalchemy import Column, Select
+from typing import Optional, Protocol, Type
+from sqlalchemy import DateTime, Select
+from sqlalchemy.orm import Mapped
-from hdx_hapi.endpoints.util.util import PaginationParams
+from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters
def apply_pagination(query: Select, pagination_parameters: PaginationParams) -> Select:
@@ -14,6 +16,82 @@ def apply_pagination(query: Select, pagination_parameters: PaginationParams) ->
return query.limit(limit).offset(offset)
-def case_insensitive_filter(query: Select, column: Column, value: str) -> Select:
+class EntityWithReferencePeriod(Protocol):
+ reference_period_start: Mapped[DateTime]
+ reference_period_end: Mapped[DateTime]
+
+
+def apply_reference_period_filter(
+ query: Select,
+ ref_period_parameters: ReferencePeriodParameters,
+ db_class: Type[EntityWithReferencePeriod],
+) -> Select:
+ if ref_period_parameters.reference_period_start_min:
+ query = query.where(db_class.reference_period_start >= ref_period_parameters.reference_period_start_min)
+ if ref_period_parameters.reference_period_start_max:
+ query = query.where(db_class.reference_period_start < ref_period_parameters.reference_period_start_max)
+ if ref_period_parameters.reference_period_end_min:
+ query = query.where(db_class.reference_period_end >= ref_period_parameters.reference_period_end_min)
+ if ref_period_parameters.reference_period_end_max:
+ query = query.where(db_class.reference_period_end < ref_period_parameters.reference_period_end_max)
+ return query
+
+
+class EntityWithLocationAdmin(Protocol):
+ location_ref: Mapped[int]
+ location_code: Mapped[str]
+ location_name: Mapped[str]
+ admin1_ref: Mapped[int]
+ admin1_code: Mapped[str]
+ admin1_name: Mapped[str]
+ admin1_is_unspecified: Mapped[bool]
+ admin2_ref: Mapped[int]
+ admin2_code: Mapped[str]
+ admin2_name: Mapped[str]
+ admin2_is_unspecified: Mapped[bool]
+
+
+def apply_location_admin_filter(
+ query: Select,
+ db_class: Type[EntityWithLocationAdmin],
+ location_ref: Optional[int] = None,
+ location_code: Optional[str] = None,
+ location_name: Optional[str] = None,
+ admin1_ref: Optional[int] = None,
+ admin1_code: Optional[str] = None,
+ admin1_name: Optional[str] = None,
+ admin1_is_unspecified: Optional[bool] = None,
+ admin2_ref: Optional[int] = None,
+ admin2_code: Optional[str] = None,
+ admin2_name: Optional[str] = None,
+ admin2_is_unspecified: Optional[bool] = None,
+) -> Select:
+ if location_ref:
+ query = query.where(db_class.location_ref == location_ref)
+ if location_code:
+ query = case_insensitive_filter(query, db_class.location_code, location_code)
+ if location_name:
+ query = query.where(db_class.location_name.icontains(location_name))
+ if admin1_ref:
+ query = query.where(db_class.admin1_ref == admin1_ref)
+ if admin1_code:
+ query = case_insensitive_filter(query, db_class.admin1_code, admin1_code)
+ if admin1_name:
+ query = query.where(db_class.admin1_name.icontains(admin1_name))
+ if admin2_ref:
+ query = query.where(db_class.admin2_ref == admin2_ref)
+ if admin2_code:
+ query = case_insensitive_filter(query, db_class.admin2_code, admin2_code)
+ if admin2_name:
+ query = query.where(db_class.admin2_name.icontains(admin2_name))
+ if admin1_is_unspecified is not None:
+ query = query.where(db_class.admin1_is_unspecified == admin1_is_unspecified)
+ if admin2_is_unspecified is not None:
+ query = query.where(db_class.admin2_is_unspecified == admin2_is_unspecified)
+
+ return query
+
+
+def case_insensitive_filter(query: Select, column: Mapped[str], value: str) -> Select:
query = query.where(column.ilike(value))
return query
diff --git a/hdx_hapi/db/models/views/all_views.py b/hdx_hapi/db/models/views/all_views.py
new file mode 100644
index 00000000..4f01e515
--- /dev/null
+++ b/hdx_hapi/db/models/views/all_views.py
@@ -0,0 +1,412 @@
+"""
+This code was generated automatically using src/hapi_schema/utils/hapi_views_code_generator.py
+"""
+
+from decimal import Decimal
+from sqlalchemy import DateTime
+from sqlalchemy.orm import column_property, Mapped
+from hdx_hapi.db.models.views.util.util import view
+from hdx_hapi.db.models.base import Base
+from hapi_schema.db_admin1 import view_params_admin1
+from hapi_schema.db_admin2 import view_params_admin2
+from hapi_schema.db_conflict_event import view_params_conflict_event
+from hapi_schema.db_currency import view_params_currency
+from hapi_schema.db_dataset import view_params_dataset
+from hapi_schema.db_food_price import view_params_food_price
+from hapi_schema.db_food_security import view_params_food_security
+from hapi_schema.db_funding import view_params_funding
+from hapi_schema.db_humanitarian_needs import view_params_humanitarian_needs
+from hapi_schema.db_location import view_params_location
+from hapi_schema.db_national_risk import view_params_national_risk
+from hapi_schema.db_operational_presence import view_params_operational_presence
+from hapi_schema.db_org_type import view_params_org_type
+from hapi_schema.db_org import view_params_org
+from hapi_schema.db_population import view_params_population
+from hapi_schema.db_poverty_rate import view_params_poverty_rate
+from hapi_schema.db_refugees import view_params_refugees
+from hapi_schema.db_resource import view_params_resource
+from hapi_schema.db_sector import view_params_sector
+from hapi_schema.db_wfp_commodity import view_params_wfp_commodity
+from hapi_schema.db_wfp_market import view_params_wfp_market
+from hapi_schema.db_patch import view_params_patch
+from hapi_schema.utils.enums import RiskClass
+from hapi_schema.utils.enums import Gender
+
+admin1_view = view(view_params_admin1.name, Base.metadata, view_params_admin1.selectable)
+admin2_view = view(view_params_admin2.name, Base.metadata, view_params_admin2.selectable)
+conflict_event_view = view(view_params_conflict_event.name, Base.metadata, view_params_conflict_event.selectable)
+currency_view = view(view_params_currency.name, Base.metadata, view_params_currency.selectable)
+dataset_view = view(view_params_dataset.name, Base.metadata, view_params_dataset.selectable)
+food_price_view = view(view_params_food_price.name, Base.metadata, view_params_food_price.selectable)
+food_security_view = view(view_params_food_security.name, Base.metadata, view_params_food_security.selectable)
+funding_view = view(view_params_funding.name, Base.metadata, view_params_funding.selectable)
+humanitarian_needs_view = view(
+ view_params_humanitarian_needs.name, Base.metadata, view_params_humanitarian_needs.selectable
+)
+location_view = view(view_params_location.name, Base.metadata, view_params_location.selectable)
+national_risk_view = view(view_params_national_risk.name, Base.metadata, view_params_national_risk.selectable)
+operational_presence_view = view(
+ view_params_operational_presence.name, Base.metadata, view_params_operational_presence.selectable
+)
+org_type_view = view(view_params_org_type.name, Base.metadata, view_params_org_type.selectable)
+org_view = view(view_params_org.name, Base.metadata, view_params_org.selectable)
+population_view = view(view_params_population.name, Base.metadata, view_params_population.selectable)
+poverty_rate_view = view(view_params_poverty_rate.name, Base.metadata, view_params_poverty_rate.selectable)
+refugees_view = view(view_params_refugees.name, Base.metadata, view_params_refugees.selectable)
+resource_view = view(view_params_resource.name, Base.metadata, view_params_resource.selectable)
+sector_view = view(view_params_sector.name, Base.metadata, view_params_sector.selectable)
+wfp_commodity_view = view(view_params_wfp_commodity.name, Base.metadata, view_params_wfp_commodity.selectable)
+wfp_market_view = view(view_params_wfp_market.name, Base.metadata, view_params_wfp_market.selectable)
+patch_view = view(view_params_patch.name, Base.metadata, view_params_patch.selectable)
+
+
+class Admin1View(Base):
+ __table__ = admin1_view
+ id: Mapped[int] = column_property(admin1_view.c.id)
+ location_ref: Mapped[int] = column_property(admin1_view.c.location_ref)
+ code: Mapped[str] = column_property(admin1_view.c.code)
+ name: Mapped[str] = column_property(admin1_view.c.name)
+ is_unspecified: Mapped[bool] = column_property(admin1_view.c.is_unspecified)
+ from_cods: Mapped[bool] = column_property(admin1_view.c.from_cods)
+ reference_period_start: Mapped[DateTime] = column_property(admin1_view.c.reference_period_start)
+ reference_period_end: Mapped[DateTime] = column_property(admin1_view.c.reference_period_end)
+ location_code: Mapped[str] = column_property(admin1_view.c.location_code)
+ location_name: Mapped[str] = column_property(admin1_view.c.location_name)
+
+
+class Admin2View(Base):
+ __table__ = admin2_view
+ id: Mapped[int] = column_property(admin2_view.c.id)
+ admin1_ref: Mapped[int] = column_property(admin2_view.c.admin1_ref)
+ code: Mapped[str] = column_property(admin2_view.c.code)
+ name: Mapped[str] = column_property(admin2_view.c.name)
+ is_unspecified: Mapped[bool] = column_property(admin2_view.c.is_unspecified)
+ from_cods: Mapped[bool] = column_property(admin2_view.c.from_cods)
+ reference_period_start: Mapped[DateTime] = column_property(admin2_view.c.reference_period_start)
+ reference_period_end: Mapped[DateTime] = column_property(admin2_view.c.reference_period_end)
+ admin1_code: Mapped[str] = column_property(admin2_view.c.admin1_code)
+ admin1_name: Mapped[str] = column_property(admin2_view.c.admin1_name)
+ admin1_is_unspecified: Mapped[bool] = column_property(admin2_view.c.admin1_is_unspecified)
+ location_code: Mapped[str] = column_property(admin2_view.c.location_code)
+ location_name: Mapped[str] = column_property(admin2_view.c.location_name)
+
+
+class ConflictEventView(Base):
+ __table__ = conflict_event_view
+ resource_hdx_id: Mapped[str] = column_property(conflict_event_view.c.resource_hdx_id)
+ admin2_ref: Mapped[int] = column_property(conflict_event_view.c.admin2_ref)
+ event_type: Mapped[str] = column_property(conflict_event_view.c.event_type)
+ events: Mapped[int] = column_property(conflict_event_view.c.events)
+ fatalities: Mapped[int] = column_property(conflict_event_view.c.fatalities)
+ reference_period_start: Mapped[DateTime] = column_property(conflict_event_view.c.reference_period_start)
+ reference_period_end: Mapped[DateTime] = column_property(conflict_event_view.c.reference_period_end)
+ location_code: Mapped[str] = column_property(conflict_event_view.c.location_code)
+ location_name: Mapped[str] = column_property(conflict_event_view.c.location_name)
+ admin1_code: Mapped[str] = column_property(conflict_event_view.c.admin1_code)
+ admin1_name: Mapped[str] = column_property(conflict_event_view.c.admin1_name)
+ admin1_is_unspecified: Mapped[bool] = column_property(conflict_event_view.c.admin1_is_unspecified)
+ location_ref: Mapped[int] = column_property(conflict_event_view.c.location_ref)
+ admin2_code: Mapped[str] = column_property(conflict_event_view.c.admin2_code)
+ admin2_name: Mapped[str] = column_property(conflict_event_view.c.admin2_name)
+ admin2_is_unspecified: Mapped[bool] = column_property(conflict_event_view.c.admin2_is_unspecified)
+ admin1_ref: Mapped[int] = column_property(conflict_event_view.c.admin1_ref)
+
+
+class CurrencyView(Base):
+ __table__ = currency_view
+ code: Mapped[str] = column_property(currency_view.c.code)
+ name: Mapped[str] = column_property(currency_view.c.name)
+
+
+class DatasetView(Base):
+ __table__ = dataset_view
+ hdx_id: Mapped[str] = column_property(dataset_view.c.hdx_id)
+ hdx_stub: Mapped[str] = column_property(dataset_view.c.hdx_stub)
+ title: Mapped[str] = column_property(dataset_view.c.title)
+ hdx_provider_stub: Mapped[str] = column_property(dataset_view.c.hdx_provider_stub)
+ hdx_provider_name: Mapped[str] = column_property(dataset_view.c.hdx_provider_name)
+
+
+class FoodPriceView(Base):
+ __table__ = food_price_view
+ resource_hdx_id: Mapped[str] = column_property(food_price_view.c.resource_hdx_id)
+ market_code: Mapped[str] = column_property(food_price_view.c.market_code)
+ commodity_code: Mapped[str] = column_property(food_price_view.c.commodity_code)
+ currency_code: Mapped[str] = column_property(food_price_view.c.currency_code)
+ unit: Mapped[str] = column_property(food_price_view.c.unit)
+ price_flag: Mapped[str] = column_property(food_price_view.c.price_flag)
+ price_type: Mapped[str] = column_property(food_price_view.c.price_type)
+ price: Mapped[Decimal] = column_property(food_price_view.c.price)
+ reference_period_start: Mapped[DateTime] = column_property(food_price_view.c.reference_period_start)
+ reference_period_end: Mapped[DateTime] = column_property(food_price_view.c.reference_period_end)
+ admin2_ref: Mapped[int] = column_property(food_price_view.c.admin2_ref)
+ market_name: Mapped[str] = column_property(food_price_view.c.market_name)
+ lat: Mapped[float] = column_property(food_price_view.c.lat)
+ lon: Mapped[float] = column_property(food_price_view.c.lon)
+ commodity_category: Mapped[str] = column_property(food_price_view.c.commodity_category)
+ commodity_name: Mapped[str] = column_property(food_price_view.c.commodity_name)
+ location_code: Mapped[str] = column_property(food_price_view.c.location_code)
+ location_name: Mapped[str] = column_property(food_price_view.c.location_name)
+ location_ref: Mapped[int] = column_property(food_price_view.c.location_ref)
+ admin1_code: Mapped[str] = column_property(food_price_view.c.admin1_code)
+ admin1_name: Mapped[str] = column_property(food_price_view.c.admin1_name)
+ admin1_is_unspecified: Mapped[bool] = column_property(food_price_view.c.admin1_is_unspecified)
+ admin1_ref: Mapped[int] = column_property(food_price_view.c.admin1_ref)
+ admin2_code: Mapped[str] = column_property(food_price_view.c.admin2_code)
+ admin2_name: Mapped[str] = column_property(food_price_view.c.admin2_name)
+ admin2_is_unspecified: Mapped[bool] = column_property(food_price_view.c.admin2_is_unspecified)
+
+
+class FoodSecurityView(Base):
+ __table__ = food_security_view
+ resource_hdx_id: Mapped[str] = column_property(food_security_view.c.resource_hdx_id)
+ admin2_ref: Mapped[int] = column_property(food_security_view.c.admin2_ref)
+ ipc_phase: Mapped[str] = column_property(food_security_view.c.ipc_phase)
+ ipc_type: Mapped[str] = column_property(food_security_view.c.ipc_type)
+ population_in_phase: Mapped[int] = column_property(food_security_view.c.population_in_phase)
+ population_fraction_in_phase: Mapped[float] = column_property(food_security_view.c.population_fraction_in_phase)
+ reference_period_start: Mapped[DateTime] = column_property(food_security_view.c.reference_period_start)
+ reference_period_end: Mapped[DateTime] = column_property(food_security_view.c.reference_period_end)
+ location_code: Mapped[str] = column_property(food_security_view.c.location_code)
+ location_name: Mapped[str] = column_property(food_security_view.c.location_name)
+ admin1_code: Mapped[str] = column_property(food_security_view.c.admin1_code)
+ admin1_name: Mapped[str] = column_property(food_security_view.c.admin1_name)
+ admin1_is_unspecified: Mapped[bool] = column_property(food_security_view.c.admin1_is_unspecified)
+ location_ref: Mapped[int] = column_property(food_security_view.c.location_ref)
+ admin2_code: Mapped[str] = column_property(food_security_view.c.admin2_code)
+ admin2_name: Mapped[str] = column_property(food_security_view.c.admin2_name)
+ admin2_is_unspecified: Mapped[bool] = column_property(food_security_view.c.admin2_is_unspecified)
+ admin1_ref: Mapped[int] = column_property(food_security_view.c.admin1_ref)
+
+
+class FundingView(Base):
+ __table__ = funding_view
+ resource_hdx_id: Mapped[str] = column_property(funding_view.c.resource_hdx_id)
+ appeal_code: Mapped[str] = column_property(funding_view.c.appeal_code)
+ location_ref: Mapped[int] = column_property(funding_view.c.location_ref)
+ appeal_name: Mapped[str] = column_property(funding_view.c.appeal_name)
+ appeal_type: Mapped[str] = column_property(funding_view.c.appeal_type)
+ requirements_usd: Mapped[Decimal] = column_property(funding_view.c.requirements_usd)
+ funding_usd: Mapped[Decimal] = column_property(funding_view.c.funding_usd)
+ funding_pct: Mapped[Decimal] = column_property(funding_view.c.funding_pct)
+ reference_period_start: Mapped[DateTime] = column_property(funding_view.c.reference_period_start)
+ reference_period_end: Mapped[DateTime] = column_property(funding_view.c.reference_period_end)
+ location_code: Mapped[str] = column_property(funding_view.c.location_code)
+ location_name: Mapped[str] = column_property(funding_view.c.location_name)
+
+
+class HumanitarianNeedsView(Base):
+ __table__ = humanitarian_needs_view
+ resource_hdx_id: Mapped[str] = column_property(humanitarian_needs_view.c.resource_hdx_id)
+ admin2_ref: Mapped[int] = column_property(humanitarian_needs_view.c.admin2_ref)
+ gender: Mapped[str] = column_property(humanitarian_needs_view.c.gender)
+ age_range: Mapped[str] = column_property(humanitarian_needs_view.c.age_range)
+ min_age: Mapped[int] = column_property(humanitarian_needs_view.c.min_age)
+ max_age: Mapped[int] = column_property(humanitarian_needs_view.c.max_age)
+ sector_code: Mapped[str] = column_property(humanitarian_needs_view.c.sector_code)
+ population_group: Mapped[str] = column_property(humanitarian_needs_view.c.population_group)
+ population_status: Mapped[str] = column_property(humanitarian_needs_view.c.population_status)
+ disabled_marker: Mapped[str] = column_property(humanitarian_needs_view.c.disabled_marker)
+ population: Mapped[int] = column_property(humanitarian_needs_view.c.population)
+ reference_period_start: Mapped[DateTime] = column_property(humanitarian_needs_view.c.reference_period_start)
+ reference_period_end: Mapped[DateTime] = column_property(humanitarian_needs_view.c.reference_period_end)
+ sector_name: Mapped[str] = column_property(humanitarian_needs_view.c.sector_name)
+ location_code: Mapped[str] = column_property(humanitarian_needs_view.c.location_code)
+ location_name: Mapped[str] = column_property(humanitarian_needs_view.c.location_name)
+ location_ref: Mapped[int] = column_property(humanitarian_needs_view.c.location_ref)
+ admin1_code: Mapped[str] = column_property(humanitarian_needs_view.c.admin1_code)
+ admin1_name: Mapped[str] = column_property(humanitarian_needs_view.c.admin1_name)
+ admin1_is_unspecified: Mapped[bool] = column_property(humanitarian_needs_view.c.admin1_is_unspecified)
+ admin2_code: Mapped[str] = column_property(humanitarian_needs_view.c.admin2_code)
+ admin2_name: Mapped[str] = column_property(humanitarian_needs_view.c.admin2_name)
+ admin2_is_unspecified: Mapped[bool] = column_property(humanitarian_needs_view.c.admin2_is_unspecified)
+ admin1_ref: Mapped[int] = column_property(humanitarian_needs_view.c.admin1_ref)
+
+
+class LocationView(Base):
+ __table__ = location_view
+ id: Mapped[int] = column_property(location_view.c.id)
+ code: Mapped[str] = column_property(location_view.c.code)
+ name: Mapped[str] = column_property(location_view.c.name)
+ from_cods: Mapped[bool] = column_property(location_view.c.from_cods)
+ reference_period_start: Mapped[DateTime] = column_property(location_view.c.reference_period_start)
+ reference_period_end: Mapped[DateTime] = column_property(location_view.c.reference_period_end)
+
+
+class NationalRiskView(Base):
+ __table__ = national_risk_view
+ resource_hdx_id: Mapped[str] = column_property(national_risk_view.c.resource_hdx_id)
+ location_ref: Mapped[int] = column_property(national_risk_view.c.location_ref)
+ risk_class: Mapped[RiskClass] = column_property(national_risk_view.c.risk_class)
+ global_rank: Mapped[int] = column_property(national_risk_view.c.global_rank)
+ overall_risk: Mapped[float] = column_property(national_risk_view.c.overall_risk)
+ hazard_exposure_risk: Mapped[float] = column_property(national_risk_view.c.hazard_exposure_risk)
+ vulnerability_risk: Mapped[float] = column_property(national_risk_view.c.vulnerability_risk)
+ coping_capacity_risk: Mapped[float] = column_property(national_risk_view.c.coping_capacity_risk)
+ meta_missing_indicators_pct: Mapped[float] = column_property(national_risk_view.c.meta_missing_indicators_pct)
+ meta_avg_recentness_years: Mapped[float] = column_property(national_risk_view.c.meta_avg_recentness_years)
+ reference_period_start: Mapped[DateTime] = column_property(national_risk_view.c.reference_period_start)
+ reference_period_end: Mapped[DateTime] = column_property(national_risk_view.c.reference_period_end)
+ location_code: Mapped[str] = column_property(national_risk_view.c.location_code)
+ location_name: Mapped[str] = column_property(national_risk_view.c.location_name)
+
+
+class OperationalPresenceView(Base):
+ __table__ = operational_presence_view
+ resource_hdx_id: Mapped[str] = column_property(operational_presence_view.c.resource_hdx_id)
+ admin2_ref: Mapped[int] = column_property(operational_presence_view.c.admin2_ref)
+ org_acronym: Mapped[str] = column_property(operational_presence_view.c.org_acronym)
+ org_name: Mapped[str] = column_property(operational_presence_view.c.org_name)
+ sector_code: Mapped[str] = column_property(operational_presence_view.c.sector_code)
+ reference_period_start: Mapped[DateTime] = column_property(operational_presence_view.c.reference_period_start)
+ reference_period_end: Mapped[DateTime] = column_property(operational_presence_view.c.reference_period_end)
+ org_type_code: Mapped[str] = column_property(operational_presence_view.c.org_type_code)
+ org_type_description: Mapped[str] = column_property(operational_presence_view.c.org_type_description)
+ sector_name: Mapped[str] = column_property(operational_presence_view.c.sector_name)
+ location_code: Mapped[str] = column_property(operational_presence_view.c.location_code)
+ location_name: Mapped[str] = column_property(operational_presence_view.c.location_name)
+ admin1_code: Mapped[str] = column_property(operational_presence_view.c.admin1_code)
+ admin1_name: Mapped[str] = column_property(operational_presence_view.c.admin1_name)
+ admin1_is_unspecified: Mapped[bool] = column_property(operational_presence_view.c.admin1_is_unspecified)
+ location_ref: Mapped[int] = column_property(operational_presence_view.c.location_ref)
+ admin2_code: Mapped[str] = column_property(operational_presence_view.c.admin2_code)
+ admin2_name: Mapped[str] = column_property(operational_presence_view.c.admin2_name)
+ admin2_is_unspecified: Mapped[bool] = column_property(operational_presence_view.c.admin2_is_unspecified)
+ admin1_ref: Mapped[int] = column_property(operational_presence_view.c.admin1_ref)
+
+
+class OrgTypeView(Base):
+ __table__ = org_type_view
+ code: Mapped[str] = column_property(org_type_view.c.code)
+ description: Mapped[str] = column_property(org_type_view.c.description)
+
+
+class OrgView(Base):
+ __table__ = org_view
+ acronym: Mapped[str] = column_property(org_view.c.acronym)
+ name: Mapped[str] = column_property(org_view.c.name)
+ org_type_code: Mapped[str] = column_property(org_view.c.org_type_code)
+ org_type_description: Mapped[str] = column_property(org_view.c.org_type_description)
+
+
+class PopulationView(Base):
+ __table__ = population_view
+ resource_hdx_id: Mapped[str] = column_property(population_view.c.resource_hdx_id)
+ admin2_ref: Mapped[int] = column_property(population_view.c.admin2_ref)
+ gender: Mapped[Gender] = column_property(population_view.c.gender)
+ age_range: Mapped[str] = column_property(population_view.c.age_range)
+ min_age: Mapped[int] = column_property(population_view.c.min_age)
+ max_age: Mapped[int] = column_property(population_view.c.max_age)
+ population: Mapped[int] = column_property(population_view.c.population)
+ reference_period_start: Mapped[DateTime] = column_property(population_view.c.reference_period_start)
+ reference_period_end: Mapped[DateTime] = column_property(population_view.c.reference_period_end)
+ location_code: Mapped[str] = column_property(population_view.c.location_code)
+ location_name: Mapped[str] = column_property(population_view.c.location_name)
+ admin1_code: Mapped[str] = column_property(population_view.c.admin1_code)
+ admin1_name: Mapped[str] = column_property(population_view.c.admin1_name)
+ admin1_is_unspecified: Mapped[bool] = column_property(population_view.c.admin1_is_unspecified)
+ location_ref: Mapped[int] = column_property(population_view.c.location_ref)
+ admin2_code: Mapped[str] = column_property(population_view.c.admin2_code)
+ admin2_name: Mapped[str] = column_property(population_view.c.admin2_name)
+ admin2_is_unspecified: Mapped[bool] = column_property(population_view.c.admin2_is_unspecified)
+ admin1_ref: Mapped[int] = column_property(population_view.c.admin1_ref)
+
+
+class PovertyRateView(Base):
+ __table__ = poverty_rate_view
+ resource_hdx_id: Mapped[str] = column_property(poverty_rate_view.c.resource_hdx_id)
+ admin1_ref: Mapped[int] = column_property(poverty_rate_view.c.admin1_ref)
+ admin1_name: Mapped[str] = column_property(poverty_rate_view.c.admin1_name)
+ mpi: Mapped[float] = column_property(poverty_rate_view.c.mpi)
+ headcount_ratio: Mapped[float] = column_property(poverty_rate_view.c.headcount_ratio)
+ intensity_of_deprivation: Mapped[float] = column_property(poverty_rate_view.c.intensity_of_deprivation)
+ vulnerable_to_poverty: Mapped[float] = column_property(poverty_rate_view.c.vulnerable_to_poverty)
+ in_severe_poverty: Mapped[float] = column_property(poverty_rate_view.c.in_severe_poverty)
+ reference_period_start: Mapped[DateTime] = column_property(poverty_rate_view.c.reference_period_start)
+ reference_period_end: Mapped[DateTime] = column_property(poverty_rate_view.c.reference_period_end)
+ location_code: Mapped[str] = column_property(poverty_rate_view.c.location_code)
+ location_name: Mapped[str] = column_property(poverty_rate_view.c.location_name)
+ admin1_code: Mapped[str] = column_property(poverty_rate_view.c.admin1_code)
+ admin1_is_unspecified: Mapped[bool] = column_property(poverty_rate_view.c.admin1_is_unspecified)
+ location_ref: Mapped[int] = column_property(poverty_rate_view.c.location_ref)
+
+
+class RefugeesView(Base):
+ __table__ = refugees_view
+ resource_hdx_id: Mapped[str] = column_property(refugees_view.c.resource_hdx_id)
+ origin_location_ref: Mapped[int] = column_property(refugees_view.c.origin_location_ref)
+ asylum_location_ref: Mapped[int] = column_property(refugees_view.c.asylum_location_ref)
+ population_group: Mapped[str] = column_property(refugees_view.c.population_group)
+ gender: Mapped[str] = column_property(refugees_view.c.gender)
+ age_range: Mapped[str] = column_property(refugees_view.c.age_range)
+ min_age: Mapped[int] = column_property(refugees_view.c.min_age)
+ max_age: Mapped[int] = column_property(refugees_view.c.max_age)
+ population: Mapped[int] = column_property(refugees_view.c.population)
+ reference_period_start: Mapped[DateTime] = column_property(refugees_view.c.reference_period_start)
+ reference_period_end: Mapped[DateTime] = column_property(refugees_view.c.reference_period_end)
+ origin_location_code: Mapped[str] = column_property(refugees_view.c.origin_location_code)
+ origin_location_name: Mapped[str] = column_property(refugees_view.c.origin_location_name)
+ asylum_location_code: Mapped[str] = column_property(refugees_view.c.asylum_location_code)
+ asylum_location_name: Mapped[str] = column_property(refugees_view.c.asylum_location_name)
+
+
+class ResourceView(Base):
+ __table__ = resource_view
+ hdx_id: Mapped[str] = column_property(resource_view.c.hdx_id)
+ dataset_hdx_id: Mapped[str] = column_property(resource_view.c.dataset_hdx_id)
+ name: Mapped[str] = column_property(resource_view.c.name)
+ format: Mapped[str] = column_property(resource_view.c.format)
+ update_date: Mapped[DateTime] = column_property(resource_view.c.update_date)
+ is_hxl: Mapped[bool] = column_property(resource_view.c.is_hxl)
+ download_url: Mapped[str] = column_property(resource_view.c.download_url)
+ hapi_updated_date: Mapped[DateTime] = column_property(resource_view.c.hapi_updated_date)
+ dataset_hdx_stub: Mapped[str] = column_property(resource_view.c.dataset_hdx_stub)
+ dataset_title: Mapped[str] = column_property(resource_view.c.dataset_title)
+ dataset_hdx_provider_stub: Mapped[str] = column_property(resource_view.c.dataset_hdx_provider_stub)
+ dataset_hdx_provider_name: Mapped[str] = column_property(resource_view.c.dataset_hdx_provider_name)
+
+
+class SectorView(Base):
+ __table__ = sector_view
+ code: Mapped[str] = column_property(sector_view.c.code)
+ name: Mapped[str] = column_property(sector_view.c.name)
+
+
+class WfpCommodityView(Base):
+ __table__ = wfp_commodity_view
+ code: Mapped[str] = column_property(wfp_commodity_view.c.code)
+ category: Mapped[str] = column_property(wfp_commodity_view.c.category)
+ name: Mapped[str] = column_property(wfp_commodity_view.c.name)
+
+
+class WfpMarketView(Base):
+ __table__ = wfp_market_view
+ code: Mapped[str] = column_property(wfp_market_view.c.code)
+ admin2_ref: Mapped[int] = column_property(wfp_market_view.c.admin2_ref)
+ name: Mapped[str] = column_property(wfp_market_view.c.name)
+ lat: Mapped[float] = column_property(wfp_market_view.c.lat)
+ lon: Mapped[float] = column_property(wfp_market_view.c.lon)
+ location_code: Mapped[str] = column_property(wfp_market_view.c.location_code)
+ location_name: Mapped[str] = column_property(wfp_market_view.c.location_name)
+ admin1_code: Mapped[str] = column_property(wfp_market_view.c.admin1_code)
+ admin1_name: Mapped[str] = column_property(wfp_market_view.c.admin1_name)
+ admin1_is_unspecified: Mapped[bool] = column_property(wfp_market_view.c.admin1_is_unspecified)
+ location_ref: Mapped[int] = column_property(wfp_market_view.c.location_ref)
+ admin2_code: Mapped[str] = column_property(wfp_market_view.c.admin2_code)
+ admin2_name: Mapped[str] = column_property(wfp_market_view.c.admin2_name)
+ admin2_is_unspecified: Mapped[bool] = column_property(wfp_market_view.c.admin2_is_unspecified)
+ admin1_ref: Mapped[int] = column_property(wfp_market_view.c.admin1_ref)
+
+
+class PatchView(Base):
+ __table__ = patch_view
+ id: Mapped[int] = column_property(patch_view.c.id)
+ patch_sequence_number: Mapped[int] = column_property(patch_view.c.patch_sequence_number)
+ commit_hash: Mapped[str] = column_property(patch_view.c.commit_hash)
+ commit_date: Mapped[DateTime] = column_property(patch_view.c.commit_date)
+ patch_path: Mapped[str] = column_property(patch_view.c.patch_path)
+ patch_permalink_url: Mapped[str] = column_property(patch_view.c.patch_permalink_url)
+ patch_target: Mapped[str] = column_property(patch_view.c.patch_target)
+ patch_hash: Mapped[str] = column_property(patch_view.c.patch_hash)
+ state: Mapped[str] = column_property(patch_view.c.state)
+ execution_date: Mapped[DateTime] = column_property(patch_view.c.execution_date)
diff --git a/hdx_hapi/db/models/views/db_admin1_view.py b/hdx_hapi/db/models/views/db_admin1_view.py
deleted file mode 100644
index 7b71503c..00000000
--- a/hdx_hapi/db/models/views/db_admin1_view.py
+++ /dev/null
@@ -1,29 +0,0 @@
-from sqlalchemy import DateTime
-from sqlalchemy.orm import column_property, Mapped
-
-from hapi_schema.db_admin1 import view_params_admin1
-
-from hdx_hapi.db.models.views.util.util import view
-from hdx_hapi.db.models.base import Base
-
-
-admin1_view = view(view_params_admin1.name, Base.metadata, view_params_admin1.selectable)
-
-class Admin1View(Base):
- __table__ = admin1_view
-
- id: Mapped[int] = column_property(admin1_view.c.id)
- location_ref: Mapped[int] = column_property(admin1_view.c.location_ref)
- code: Mapped[str] = column_property(admin1_view.c.code)
- name: Mapped[str] = column_property(admin1_view.c.name)
- is_unspecified: Mapped[bool] = column_property(admin1_view.c.is_unspecified)
-
- reference_period_start: Mapped[DateTime] = column_property(admin1_view.c.reference_period_start)
- reference_period_end: Mapped[DateTime] = column_property(admin1_view.c.reference_period_end)
- hapi_updated_date: Mapped[DateTime] = column_property(admin1_view.c.hapi_updated_date)
- hapi_replaced_date: Mapped[DateTime] = column_property(admin1_view.c.hapi_replaced_date)
-
- location_code: Mapped[str] = column_property(admin1_view.c.location_code)
- location_name: Mapped[str] = column_property(admin1_view.c.location_name)
- location_reference_period_start: Mapped[DateTime] = column_property(admin1_view.c.location_reference_period_start)
- location_reference_period_end: Mapped[DateTime] = column_property(admin1_view.c.location_reference_period_end)
diff --git a/hdx_hapi/db/models/views/db_admin2_view.py b/hdx_hapi/db/models/views/db_admin2_view.py
deleted file mode 100644
index 6d8566b8..00000000
--- a/hdx_hapi/db/models/views/db_admin2_view.py
+++ /dev/null
@@ -1,33 +0,0 @@
-from sqlalchemy import DateTime
-from sqlalchemy.orm import Mapped, column_property
-
-from hapi_schema.db_admin2 import view_params_admin2
-from hdx_hapi.db.models.views.util.util import view
-from hdx_hapi.db.models.base import Base
-
-admin2_view = view(view_params_admin2.name, Base.metadata, view_params_admin2.selectable)
-
-
-class Admin2View(Base):
- __table__ = admin2_view
-
- id: Mapped[int] = column_property(admin2_view.c.id)
- admin1_ref: Mapped[int] = column_property(admin2_view.c.admin1_ref)
- code: Mapped[str] = column_property(admin2_view.c.code)
- name: Mapped[str] = column_property(admin2_view.c.name)
- is_unspecified: Mapped[bool] = column_property(admin2_view.c.is_unspecified)
- reference_period_start: Mapped[DateTime] = column_property(admin2_view.c.reference_period_start)
- reference_period_end: Mapped[DateTime] = column_property(admin2_view.c.reference_period_end)
- hapi_updated_date: Mapped[DateTime] = column_property(admin2_view.c.hapi_updated_date)
- hapi_replaced_date: Mapped[DateTime] = column_property(admin2_view.c.hapi_replaced_date)
-
- admin1_code: Mapped[str] = column_property(admin2_view.c.admin1_code)
- admin1_name: Mapped[str] = column_property(admin2_view.c.admin1_name)
- admin1_is_unspecified: Mapped[bool] = column_property(admin2_view.c.admin1_is_unspecified)
- admin1_reference_period_start: Mapped[DateTime] = column_property(admin2_view.c.admin1_reference_period_start)
- admin1_reference_period_end: Mapped[DateTime] = column_property(admin2_view.c.admin1_reference_period_end)
-
- location_code: Mapped[str] = column_property(admin2_view.c.location_code)
- location_name: Mapped[str] = column_property(admin2_view.c.location_name)
- location_reference_period_start: Mapped[DateTime] = column_property(admin2_view.c.location_reference_period_start)
- location_reference_period_end: Mapped[DateTime] = column_property(admin2_view.c.location_reference_period_end)
diff --git a/hdx_hapi/db/models/views/db_age_range_view.py b/hdx_hapi/db/models/views/db_age_range_view.py
deleted file mode 100644
index e1973299..00000000
--- a/hdx_hapi/db/models/views/db_age_range_view.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from sqlalchemy.orm import column_property, Mapped
-from hapi_schema.db_age_range import view_params_age_range
-from hdx_hapi.db.models.views.util.util import view
-from hdx_hapi.db.models.base import Base
-
-
-
-age_range_view = view(view_params_age_range.name, Base.metadata, view_params_age_range.selectable)
-
-class AgeRangeView(Base):
- __table__ = age_range_view
-
- code: Mapped[str] = column_property(age_range_view.c.code)
- age_min: Mapped[int] = column_property(age_range_view.c.age_min)
- age_max: Mapped[int] = column_property(age_range_view.c.age_max)
\ No newline at end of file
diff --git a/hdx_hapi/db/models/views/db_dataset_view.py b/hdx_hapi/db/models/views/db_dataset_view.py
deleted file mode 100644
index 62c6101b..00000000
--- a/hdx_hapi/db/models/views/db_dataset_view.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from sqlalchemy.orm import Mapped, column_property
-
-from hapi_schema.db_dataset import view_params_dataset
-
-from hdx_hapi.db.models.base import Base
-from hdx_hapi.db.models.views.util.util import view
-
-dataset_view = view(view_params_dataset.name, Base.metadata, view_params_dataset.selectable)
-
-
-class DatasetView(Base):
- __table__ = dataset_view
-
- id: Mapped[int] = column_property(dataset_view.c.id)
- hdx_id: Mapped[str] = column_property(dataset_view.c.hdx_id)
- hdx_stub: Mapped[str] = column_property(dataset_view.c.hdx_stub)
- title: Mapped[str] = column_property(dataset_view.c.title)
- hdx_provider_stub: Mapped[str] = column_property(dataset_view.c.hdx_provider_stub)
- hdx_provider_name: Mapped[str] = column_property(dataset_view.c.hdx_provider_name)
diff --git a/hdx_hapi/db/models/views/db_food_security_view.py b/hdx_hapi/db/models/views/db_food_security_view.py
deleted file mode 100644
index 3e32c390..00000000
--- a/hdx_hapi/db/models/views/db_food_security_view.py
+++ /dev/null
@@ -1,52 +0,0 @@
-from sqlalchemy import DateTime
-from sqlalchemy.orm import Mapped, column_property
-from hapi_schema.db_food_security import view_params_food_security
-
-from hdx_hapi.db.models.views.util.util import view
-from hdx_hapi.db.models.base import Base
-
-
-food_security_view = view(view_params_food_security.name, Base.metadata, view_params_food_security.selectable)
-
-
-class FoodSecurityView(Base):
- __table__ = food_security_view
-
- id: Mapped[int] = column_property(food_security_view.c.id)
- resource_ref: Mapped[int] = column_property(food_security_view.c.resource_ref)
- admin2_ref: Mapped[int] = column_property(food_security_view.c.admin2_ref)
-
- ipc_phase_name: Mapped[str] = column_property(food_security_view.c.ipc_phase_name)
- ipc_phase_code: Mapped[str] = column_property(food_security_view.c.ipc_phase_code)
- ipc_type_code: Mapped[str] = column_property(food_security_view.c.ipc_type_code)
- population_in_phase: Mapped[int] = column_property(food_security_view.c.population_in_phase)
- population_fraction_in_phase: Mapped[float] = column_property(food_security_view.c.population_fraction_in_phase)
-
- reference_period_start: Mapped[DateTime] = column_property(food_security_view.c.reference_period_start)
- reference_period_end: Mapped[DateTime] = column_property(food_security_view.c.reference_period_end)
- source_data: Mapped[str] = column_property(food_security_view.c.source_data)
-
- resource_hdx_id: Mapped[str] = column_property(food_security_view.c.resource_hdx_id)
- resource_name: Mapped[str] = column_property(food_security_view.c.resource_name)
- resource_update_date = column_property(food_security_view.c.resource_update_date)
- hapi_updated_date: Mapped[DateTime] = column_property(food_security_view.c.hapi_updated_date)
- hapi_replaced_date: Mapped[DateTime] = column_property(food_security_view.c.hapi_replaced_date)
-
- dataset_hdx_id: Mapped[str] = column_property(food_security_view.c.dataset_hdx_id)
- dataset_hdx_stub: Mapped[str] = column_property(food_security_view.c.dataset_hdx_stub)
- dataset_title: Mapped[str] = column_property(food_security_view.c.dataset_title)
- dataset_hdx_provider_stub: Mapped[str] = column_property(food_security_view.c.dataset_hdx_provider_stub)
- dataset_hdx_provider_name: Mapped[str] = column_property(food_security_view.c.dataset_hdx_provider_name)
-
- location_code: Mapped[str] = column_property(food_security_view.c.location_code)
- location_name: Mapped[str] = column_property(food_security_view.c.location_name)
-
- admin1_code: Mapped[str] = column_property(food_security_view.c.admin1_code)
- admin1_name: Mapped[str] = column_property(food_security_view.c.admin1_name)
- admin1_is_unspecified: Mapped[bool] = column_property(food_security_view.c.admin1_is_unspecified)
- location_ref: Mapped[int] = column_property(food_security_view.c.location_ref)
-
- admin2_code: Mapped[str] = column_property(food_security_view.c.admin2_code)
- admin2_name: Mapped[str] = column_property(food_security_view.c.admin2_name)
- admin2_is_unspecified: Mapped[bool] = column_property(food_security_view.c.admin2_is_unspecified)
- admin1_ref: Mapped[int] = column_property(food_security_view.c.admin1_ref)
diff --git a/hdx_hapi/db/models/views/db_gender_view.py b/hdx_hapi/db/models/views/db_gender_view.py
deleted file mode 100644
index d8888bb8..00000000
--- a/hdx_hapi/db/models/views/db_gender_view.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from sqlalchemy.orm import Mapped, column_property
-
-from hapi_schema.db_gender import view_params_gender
-
-from hdx_hapi.db.models.views.util.util import view
-from hdx_hapi.db.models.base import Base
-
-
-gender_view = view(view_params_gender.name, Base.metadata, view_params_gender.selectable)
-
-
-class GenderView(Base):
- __table__ = gender_view
-
- code: Mapped[str] = column_property(gender_view.c.code)
- description: Mapped[str] = column_property(gender_view.c.description)
diff --git a/hdx_hapi/db/models/views/db_humanitarian_needs_view.py b/hdx_hapi/db/models/views/db_humanitarian_needs_view.py
deleted file mode 100644
index cb1a6344..00000000
--- a/hdx_hapi/db/models/views/db_humanitarian_needs_view.py
+++ /dev/null
@@ -1,57 +0,0 @@
-from sqlalchemy import DateTime
-from sqlalchemy.orm import Mapped, column_property
-from hapi_schema.db_humanitarian_needs import view_params_humanitarian_needs
-
-from hdx_hapi.db.models.views.util.util import view
-from hdx_hapi.db.models.base import Base
-
-
-humanitarian_needs_view = view(view_params_humanitarian_needs.name, Base.metadata,
- view_params_humanitarian_needs.selectable)
-
-
-class HumanitarianNeedsView(Base):
- __table__ = humanitarian_needs_view
-
- id: Mapped[int] = column_property(humanitarian_needs_view.c.id)
- resource_ref: Mapped[int] = column_property(humanitarian_needs_view.c.resource_ref)
- admin2_ref: Mapped[int] = column_property(humanitarian_needs_view.c.admin2_ref)
-
- population_status_code: Mapped[str] = column_property(humanitarian_needs_view.c.population_status_code)
- population_group_code: Mapped[str] = column_property(humanitarian_needs_view.c.population_group_code)
- sector_code: Mapped[str] = column_property(humanitarian_needs_view.c.sector_code)
- sector_name: Mapped[str] = column_property(humanitarian_needs_view.c.sector_name)
-
- gender_code: Mapped[str] = column_property(humanitarian_needs_view.c.gender_code)
- age_range_code: Mapped[str] = column_property(humanitarian_needs_view.c.age_range_code)
- disabled_marker: Mapped[bool] = column_property(humanitarian_needs_view.c.disabled_marker)
- population: Mapped[int] = column_property(humanitarian_needs_view.c.population)
-
- reference_period_start: Mapped[DateTime] = column_property(humanitarian_needs_view.c.reference_period_start)
- reference_period_end: Mapped[DateTime] = column_property(humanitarian_needs_view.c.reference_period_end)
- source_data: Mapped[str] = column_property(humanitarian_needs_view.c.source_data)
-
- resource_hdx_id: Mapped[str] = column_property(humanitarian_needs_view.c.resource_hdx_id)
- resource_name: Mapped[str] = column_property(humanitarian_needs_view.c.resource_name)
- resource_update_date = column_property(humanitarian_needs_view.c.resource_update_date)
- hapi_updated_date: Mapped[DateTime] = column_property(humanitarian_needs_view.c.hapi_updated_date)
- hapi_replaced_date: Mapped[DateTime] = column_property(humanitarian_needs_view.c.hapi_replaced_date)
-
- dataset_hdx_id: Mapped[str] = column_property(humanitarian_needs_view.c.dataset_hdx_id)
- dataset_hdx_stub: Mapped[str] = column_property(humanitarian_needs_view.c.dataset_hdx_stub)
- dataset_title: Mapped[str] = column_property(humanitarian_needs_view.c.dataset_title)
- dataset_hdx_provider_stub: Mapped[str] = column_property(humanitarian_needs_view.c.dataset_hdx_provider_stub)
- dataset_hdx_provider_name: Mapped[str] = column_property(humanitarian_needs_view.c.dataset_hdx_provider_name)
-
- location_code: Mapped[str] = column_property(humanitarian_needs_view.c.location_code)
- location_name: Mapped[str] = column_property(humanitarian_needs_view.c.location_name)
-
- admin1_code: Mapped[str] = column_property(humanitarian_needs_view.c.admin1_code)
- admin1_name: Mapped[str] = column_property(humanitarian_needs_view.c.admin1_name)
- admin1_is_unspecified: Mapped[bool] = column_property(humanitarian_needs_view.c.admin1_is_unspecified)
- location_ref: Mapped[int] = column_property(humanitarian_needs_view.c.location_ref)
-
- admin2_code: Mapped[str] = column_property(humanitarian_needs_view.c.admin2_code)
- admin2_name: Mapped[str] = column_property(humanitarian_needs_view.c.admin2_name)
- admin2_is_unspecified: Mapped[bool] = column_property(humanitarian_needs_view.c.admin2_is_unspecified)
- admin1_ref: Mapped[int] = column_property(humanitarian_needs_view.c.admin1_ref)
diff --git a/hdx_hapi/db/models/views/db_location_view.py b/hdx_hapi/db/models/views/db_location_view.py
deleted file mode 100644
index 61a8e039..00000000
--- a/hdx_hapi/db/models/views/db_location_view.py
+++ /dev/null
@@ -1,23 +0,0 @@
-from sqlalchemy import DateTime
-from sqlalchemy.orm import Mapped, column_property
-
-from hapi_schema.db_location import view_params_location
-
-from hdx_hapi.db.models.views.util.util import view
-from hdx_hapi.db.models.base import Base
-
-
-location_view = view(view_params_location.name, Base.metadata, view_params_location.selectable)
-
-
-class LocationView(Base):
- __table__ = location_view
-
- id: Mapped[int] = column_property(location_view.c.id)
- code: Mapped[str] = column_property(location_view.c.code)
- name: Mapped[str] = column_property(location_view.c.name)
-
- reference_period_start: Mapped[DateTime] = column_property(location_view.c.reference_period_start)
- reference_period_end: Mapped[DateTime] = column_property(location_view.c.reference_period_end)
- hapi_updated_date: Mapped[DateTime] = column_property(location_view.c.hapi_updated_date)
- hapi_replaced_date: Mapped[DateTime] = column_property(location_view.c.hapi_replaced_date)
\ No newline at end of file
diff --git a/hdx_hapi/db/models/views/db_national_risk_view.py b/hdx_hapi/db/models/views/db_national_risk_view.py
deleted file mode 100644
index 489b9a8e..00000000
--- a/hdx_hapi/db/models/views/db_national_risk_view.py
+++ /dev/null
@@ -1,49 +0,0 @@
-from sqlalchemy import DateTime
-from sqlalchemy.orm import Mapped, column_property
-from hapi_schema.db_national_risk import view_params_national_risk
-
-from hdx_hapi.db.models.views.util.util import view
-from hdx_hapi.db.models.base import Base
-
-
-national_risk_view = view(view_params_national_risk.name, Base.metadata, view_params_national_risk.selectable)
-
-
-class NationalRiskView(Base):
- __table__ = national_risk_view
-
- id: Mapped[int] = column_property(national_risk_view.c.id)
-
- resource_ref: Mapped[int] = column_property(national_risk_view.c.resource_ref)
- admin2_ref: Mapped[int] = column_property(national_risk_view.c.admin2_ref)
-
- risk_class: Mapped[int] = column_property(national_risk_view.c.risk_class)
- global_rank: Mapped[int] = column_property(national_risk_view.c.global_rank)
- overall_risk: Mapped[float] = column_property(national_risk_view.c.overall_risk)
- hazard_exposure_risk: Mapped[float] = column_property(national_risk_view.c.hazard_exposure_risk)
- vulnerability_risk: Mapped[float] = column_property(national_risk_view.c.vulnerability_risk)
- coping_capacity_risk: Mapped[float] = column_property(national_risk_view.c.coping_capacity_risk)
- meta_missing_indicators_pct: Mapped[float] = column_property(national_risk_view.c.meta_missing_indicators_pct)
- meta_avg_recentness_years: Mapped[float] = column_property(national_risk_view.c.meta_avg_recentness_years)
-
- reference_period_start: Mapped[DateTime] = column_property(national_risk_view.c.reference_period_start)
- reference_period_end: Mapped[DateTime] = column_property(national_risk_view.c.reference_period_end)
-
- source_data: Mapped[str] = column_property(national_risk_view.c.source_data)
-
- dataset_hdx_id: Mapped[str] = column_property(national_risk_view.c.dataset_hdx_id)
- dataset_hdx_stub: Mapped[str] = column_property(national_risk_view.c.dataset_hdx_stub)
- dataset_title: Mapped[str] = column_property(national_risk_view.c.dataset_title)
- dataset_hdx_provider_stub: Mapped[str] = column_property(national_risk_view.c.dataset_hdx_provider_stub)
- dataset_hdx_provider_name: Mapped[str] = column_property(national_risk_view.c.dataset_hdx_provider_name)
-
- resource_hdx_id: Mapped[str] = column_property(national_risk_view.c.resource_hdx_id)
- resource_name: Mapped[str] = column_property(national_risk_view.c.resource_name)
- resource_update_date = column_property(national_risk_view.c.resource_update_date)
- hapi_updated_date: Mapped[DateTime] = column_property(national_risk_view.c.hapi_updated_date)
- hapi_replaced_date: Mapped[DateTime] = column_property(national_risk_view.c.hapi_replaced_date)
-
- # sector_name: Mapped[str] = column_property(national_risk_view.c.sector_name)
-
- location_code: Mapped[str] = column_property(national_risk_view.c.location_code)
- location_name: Mapped[str] = column_property(national_risk_view.c.location_name)
diff --git a/hdx_hapi/db/models/views/db_operational_presence_view.py b/hdx_hapi/db/models/views/db_operational_presence_view.py
deleted file mode 100644
index 12887fa9..00000000
--- a/hdx_hapi/db/models/views/db_operational_presence_view.py
+++ /dev/null
@@ -1,51 +0,0 @@
-from sqlalchemy import DateTime
-from sqlalchemy.orm import Mapped, column_property
-
-from hapi_schema.db_operational_presence import view_params_operational_presence
-
-from hdx_hapi.db.models.views.util.util import view
-from hdx_hapi.db.models.base import Base
-
-operational_presence_view = \
- view(view_params_operational_presence.name, Base.metadata, view_params_operational_presence.selectable)
-
-
-class OperationalPresenceView(Base):
- __table__ = operational_presence_view
-
- id: Mapped[int] = column_property(operational_presence_view.c.id)
- resource_ref: Mapped[int] = column_property(operational_presence_view.c.resource_ref)
- org_ref: Mapped[int] = column_property(operational_presence_view.c.org_ref)
-
- sector_code: Mapped[str] = column_property(operational_presence_view.c.sector_code)
- admin2_ref: Mapped[int] = column_property(operational_presence_view.c.admin2_ref)
- reference_period_start: Mapped[DateTime] = column_property(operational_presence_view.c.reference_period_start)
- reference_period_end: Mapped[DateTime] = column_property(operational_presence_view.c.reference_period_end)
- source_data: Mapped[str] = column_property(operational_presence_view.c.source_data)
-
- # Additional fields from other tables in the view
- dataset_hdx_id: Mapped[str] = column_property(operational_presence_view.c.dataset_hdx_id)
- dataset_hdx_stub: Mapped[str] = column_property(operational_presence_view.c.dataset_hdx_stub)
- dataset_title: Mapped[str] = column_property(operational_presence_view.c.dataset_title)
- dataset_hdx_provider_stub: Mapped[str] = column_property(operational_presence_view.c.dataset_hdx_provider_stub)
- dataset_hdx_provider_name: Mapped[str] = column_property(operational_presence_view.c.dataset_hdx_provider_name)
- resource_hdx_id: Mapped[str] = column_property(operational_presence_view.c.resource_hdx_id)
- resource_name: Mapped[str] = column_property(operational_presence_view.c.resource_name)
- resource_update_date: Mapped[DateTime] = column_property(operational_presence_view.c.resource_update_date)
- hapi_updated_date: Mapped[DateTime] = column_property(operational_presence_view.c.hapi_updated_date)
- hapi_replaced_date: Mapped[DateTime] = column_property(operational_presence_view.c.hapi_replaced_date)
- org_acronym: Mapped[str] = column_property(operational_presence_view.c.org_acronym)
- org_name: Mapped[str] = column_property(operational_presence_view.c.org_name)
- org_type_code: Mapped[str] = column_property(operational_presence_view.c.org_type_code)
- org_type_description: Mapped[str] = column_property(operational_presence_view.c.org_type_description)
- sector_name: Mapped[str] = column_property(operational_presence_view.c.sector_name)
- location_code: Mapped[str] = column_property(operational_presence_view.c.location_code)
- location_name: Mapped[str] = column_property(operational_presence_view.c.location_name)
- admin1_code: Mapped[str] = column_property(operational_presence_view.c.admin1_code)
- admin1_name: Mapped[str] = column_property(operational_presence_view.c.admin1_name)
- admin1_is_unspecified: Mapped[bool] = column_property(operational_presence_view.c.admin1_is_unspecified)
- location_ref: Mapped[int] = column_property(operational_presence_view.c.location_ref)
- admin2_code: Mapped[str] = column_property(operational_presence_view.c.admin2_code)
- admin2_name: Mapped[str] = column_property(operational_presence_view.c.admin2_name)
- admin2_is_unspecified: Mapped[bool] = column_property(operational_presence_view.c.admin2_is_unspecified)
- admin1_ref: Mapped[int] = column_property(operational_presence_view.c.admin1_ref)
diff --git a/hdx_hapi/db/models/views/db_org_type_view.py b/hdx_hapi/db/models/views/db_org_type_view.py
deleted file mode 100644
index 70e9aca7..00000000
--- a/hdx_hapi/db/models/views/db_org_type_view.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from sqlalchemy.orm import Mapped, column_property
-
-from hapi_schema.db_org_type import view_params_org_type
-
-from hdx_hapi.db.models.views.util.util import view
-from hdx_hapi.db.models.base import Base
-
-
-org_type_view = view(view_params_org_type.name, Base.metadata, view_params_org_type.selectable)
-
-
-class OrgTypeView(Base):
- __table__ = org_type_view
-
- code: Mapped[str] = column_property(org_type_view.c.code)
- description: Mapped[str] = column_property(org_type_view.c.description)
diff --git a/hdx_hapi/db/models/views/db_org_view.py b/hdx_hapi/db/models/views/db_org_view.py
deleted file mode 100644
index bada06c6..00000000
--- a/hdx_hapi/db/models/views/db_org_view.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from sqlalchemy import DateTime
-from sqlalchemy.orm import Mapped, column_property
-
-from hapi_schema.db_org import view_params_org
-
-from hdx_hapi.db.models.views.util.util import view
-from hdx_hapi.db.models.base import Base
-
-
-org_view = view(view_params_org.name, Base.metadata, view_params_org.selectable)
-
-
-class OrgView(Base):
- __table__ = org_view
-
- id: Mapped[int] = column_property(org_view.c.id)
- acronym: Mapped[str] = column_property(org_view.c.acronym)
- name: Mapped[str] = column_property(org_view.c.name)
- org_type_code: Mapped[str] = column_property(org_view.c.org_type_code)
- reference_period_start: Mapped[DateTime] = column_property(org_view.c.reference_period_start)
- reference_period_end: Mapped[DateTime] = column_property(org_view.c.reference_period_end)
- org_type_description: Mapped[str] = column_property(org_view.c.org_type_description)
diff --git a/hdx_hapi/db/models/views/db_population_group_view.py b/hdx_hapi/db/models/views/db_population_group_view.py
deleted file mode 100644
index 4bafcb05..00000000
--- a/hdx_hapi/db/models/views/db_population_group_view.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from sqlalchemy.orm import Mapped, column_property
-
-from hapi_schema.db_population_group import view_params_population_group
-
-from hdx_hapi.db.models.views.util.util import view
-from hdx_hapi.db.models.base import Base
-
-
-population_group_view = view(view_params_population_group.name, Base.metadata, view_params_population_group.selectable)
-
-
-class PopulationGroupView(Base):
- __table__ = population_group_view
-
- code: Mapped[str] = column_property(population_group_view.c.code)
- description: Mapped[str] = column_property(population_group_view.c.description)
diff --git a/hdx_hapi/db/models/views/db_population_status_view.py b/hdx_hapi/db/models/views/db_population_status_view.py
deleted file mode 100644
index 8d503365..00000000
--- a/hdx_hapi/db/models/views/db_population_status_view.py
+++ /dev/null
@@ -1,17 +0,0 @@
-from sqlalchemy.orm import Mapped, column_property
-
-from hapi_schema.db_population_status import view_params_population_status
-
-from hdx_hapi.db.models.views.util.util import view
-from hdx_hapi.db.models.base import Base
-
-
-population_status_view = view(view_params_population_status.name, Base.metadata,
- view_params_population_status.selectable)
-
-
-class PopulationStatusView(Base):
- __table__ = population_status_view
-
- code: Mapped[str] = column_property(population_status_view.c.code)
- description: Mapped[str] = column_property(population_status_view.c.description)
diff --git a/hdx_hapi/db/models/views/db_population_view.py b/hdx_hapi/db/models/views/db_population_view.py
deleted file mode 100644
index eb1b263e..00000000
--- a/hdx_hapi/db/models/views/db_population_view.py
+++ /dev/null
@@ -1,49 +0,0 @@
-from sqlalchemy import DateTime
-from sqlalchemy.orm import Mapped, column_property
-from hapi_schema.db_population import view_params_population
-
-from hdx_hapi.db.models.views.util.util import view
-from hdx_hapi.db.models.base import Base
-
-
-population_view = view(view_params_population.name, Base.metadata, view_params_population.selectable)
-
-
-class PopulationView(Base):
- __table__ = population_view
-
- id: Mapped[int] = column_property(population_view.c.id)
- resource_ref: Mapped[int] = column_property(population_view.c.resource_ref)
- admin2_ref: Mapped[int] = column_property(population_view.c.admin2_ref)
- gender_code: Mapped[str] = column_property(population_view.c.gender_code)
- age_range_code: Mapped[str] = column_property(population_view.c.age_range_code)
-
- population: Mapped[int] = column_property(population_view.c.population)
- reference_period_start: Mapped[DateTime] = column_property(population_view.c.reference_period_start)
- reference_period_end: Mapped[DateTime] = column_property(population_view.c.reference_period_end)
- source_data: Mapped[str] = column_property(population_view.c.source_data)
-
- resource_hdx_id: Mapped[str] = column_property(population_view.c.resource_hdx_id)
- resource_name: Mapped[str] = column_property(population_view.c.resource_name)
- resource_update_date = column_property(population_view.c.resource_update_date)
- hapi_updated_date: Mapped[DateTime] = column_property(population_view.c.hapi_updated_date)
- hapi_replaced_date: Mapped[DateTime] = column_property(population_view.c.hapi_replaced_date)
-
- dataset_hdx_id: Mapped[str] = column_property(population_view.c.dataset_hdx_id)
- dataset_hdx_stub: Mapped[str] = column_property(population_view.c.dataset_hdx_stub)
- dataset_title: Mapped[str] = column_property(population_view.c.dataset_title)
- dataset_hdx_provider_stub: Mapped[str] = column_property(population_view.c.dataset_hdx_provider_stub)
- dataset_hdx_provider_name: Mapped[str] = column_property(population_view.c.dataset_hdx_provider_name)
-
- location_code: Mapped[str] = column_property(population_view.c.location_code)
- location_name: Mapped[str] = column_property(population_view.c.location_name)
-
- admin1_code: Mapped[str] = column_property(population_view.c.admin1_code)
- admin1_name: Mapped[str] = column_property(population_view.c.admin1_name)
- admin1_is_unspecified: Mapped[bool] = column_property(population_view.c.admin1_is_unspecified)
- location_ref: Mapped[int] = column_property(population_view.c.location_ref)
-
- admin2_code: Mapped[str] = column_property(population_view.c.admin2_code)
- admin2_name: Mapped[str] = column_property(population_view.c.admin2_name)
- admin2_is_unspecified: Mapped[bool] = column_property(population_view.c.admin2_is_unspecified)
- admin1_ref: Mapped[int] = column_property(population_view.c.admin1_ref)
diff --git a/hdx_hapi/db/models/views/db_resource_view.py b/hdx_hapi/db/models/views/db_resource_view.py
deleted file mode 100644
index 75ad875a..00000000
--- a/hdx_hapi/db/models/views/db_resource_view.py
+++ /dev/null
@@ -1,33 +0,0 @@
-from sqlalchemy import DateTime
-from sqlalchemy.orm import Mapped, column_property
-
-from hapi_schema.db_resource import view_params_resource
-
-from hdx_hapi.db.models.views.util.util import view
-from hdx_hapi.db.models.base import Base
-
-
-resource_view = view(view_params_resource.name, Base.metadata, view_params_resource.selectable)
-
-
-class ResourceView(Base):
- __table__ = resource_view
-
- id: Mapped[int] = column_property(resource_view.c.id)
- dataset_ref: Mapped[int] = column_property(resource_view.c.dataset_ref)
- hdx_id: Mapped[str] = column_property(resource_view.c.hdx_id)
- name: Mapped[str] = column_property(resource_view.c.name)
- format: Mapped[str] = column_property(resource_view.c.format)
- update_date = column_property(resource_view.c.update_date)
- is_hxl: Mapped[bool] = column_property(resource_view.c.is_hxl)
- download_url: Mapped[str] = column_property(resource_view.c.download_url)
- hapi_updated_date: Mapped[DateTime] = column_property(resource_view.c.hapi_updated_date)
- hapi_replaced_date: Mapped[DateTime] = column_property(resource_view.c.hapi_replaced_date)
-
- dataset_hdx_id: Mapped[str] = column_property(resource_view.c.dataset_hdx_id)
- dataset_hdx_stub: Mapped[str] = column_property(resource_view.c.dataset_hdx_stub)
- dataset_title: Mapped[str] = column_property(resource_view.c.dataset_title)
-
- dataset_hdx_provider_stub: Mapped[str] = column_property(resource_view.c.dataset_hdx_provider_stub)
- dataset_hdx_provider_name: Mapped[str] = column_property(resource_view.c.dataset_hdx_provider_name)
-
\ No newline at end of file
diff --git a/hdx_hapi/db/models/views/db_sector_view.py b/hdx_hapi/db/models/views/db_sector_view.py
deleted file mode 100644
index 92d44cc4..00000000
--- a/hdx_hapi/db/models/views/db_sector_view.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from sqlalchemy.orm import Mapped, column_property
-
-from hapi_schema.db_sector import view_params_sector
-
-from hdx_hapi.db.models.views.util.util import view
-from hdx_hapi.db.models.base import Base
-
-
-sector_view = view(view_params_sector.name, Base.metadata, view_params_sector.selectable)
-
-
-class SectorView(Base):
- __table__ = sector_view
-
- code: Mapped[str] = column_property(sector_view.c.code)
- name: Mapped[str] = column_property(sector_view.c.name)
diff --git a/hdx_hapi/endpoints/get_admin_level.py b/hdx_hapi/endpoints/get_admin_level.py
index 30874cc7..3615063e 100644
--- a/hdx_hapi/endpoints/get_admin_level.py
+++ b/hdx_hapi/endpoints/get_admin_level.py
@@ -1,8 +1,5 @@
from typing import Annotated
-from datetime import date
-
from fastapi import Depends, Query, APIRouter
-from pydantic import NaiveDatetime
from sqlalchemy.ext.asyncio import AsyncSession
from hdx_hapi.config.config import get_config
@@ -15,10 +12,6 @@
DOC_LOCATION_NAME,
DOC_SEE_ADMIN1,
DOC_SEE_LOC,
- DOC_HAPI_UPDATED_DATE_MIN,
- DOC_HAPI_UPDATED_DATE_MAX,
- DOC_HAPI_REPLACED_DATE_MIN,
- DOC_HAPI_REPLACED_DATE_MAX,
)
from hdx_hapi.endpoints.models.base import HapiGenericResponse
@@ -26,7 +19,9 @@
from hdx_hapi.endpoints.util.util import (
CommonEndpointParams,
OutputFormat,
+ ReferencePeriodParameters,
common_endpoint_parameters,
+ reference_period_parameters,
)
from hdx_hapi.services.admin1_logic import get_admin1_srv
from hdx_hapi.services.admin2_logic import get_admin2_srv
@@ -42,48 +37,30 @@
@router.get(
- '/api/location',
+ '/api/metadata/location',
response_model=HapiGenericResponse[LocationResponse],
summary='Get the list of locations (typically countries) included in HAPI',
include_in_schema=False,
)
@router.get(
- '/api/v1/location',
+ '/api/v1/metadata/location',
response_model=HapiGenericResponse[LocationResponse],
summary='Get the list of locations (typically countries) included in HAPI',
)
async def get_locations(
+ ref_period_parameters: Annotated[ReferencePeriodParameters, Depends(reference_period_parameters)],
common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
db: AsyncSession = Depends(get_db),
code: Annotated[str, Query(max_length=128, description=f'{DOC_LOCATION_CODE}')] = None,
name: Annotated[str, Query(max_length=512, description=f'{DOC_LOCATION_NAME}')] = None,
- hapi_updated_date_min: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_UPDATED_DATE_MIN}'),
- ] = None,
- hapi_updated_date_max: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_UPDATED_DATE_MAX}'),
- ] = None,
- hapi_replaced_date_min: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_REPLACED_DATE_MIN}'),
- ] = None,
- hapi_replaced_date_max: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_REPLACED_DATE_MAX}'),
- ] = None,
output_format: OutputFormat = OutputFormat.JSON,
):
result = await get_locations_srv(
pagination_parameters=common_parameters,
+ ref_period_parameters=ref_period_parameters,
db=db,
code=code,
name=name,
- hapi_updated_date_min=hapi_updated_date_min,
- hapi_updated_date_max=hapi_updated_date_max,
- hapi_replaced_date_min=hapi_replaced_date_min,
- hapi_replaced_date_max=hapi_replaced_date_max,
)
return transform_result_to_csv_stream_if_requested(result, output_format, LocationResponse)
@@ -95,50 +72,48 @@ async def get_locations(
@router.get(
- '/api/admin1',
+ '/api/metadata/admin1',
response_model=HapiGenericResponse[Admin1Response],
summary='Get the list of first-level subnational administrative divisions available in HAPI',
include_in_schema=False,
)
@router.get(
- '/api/v1/admin1',
+ '/api/v1/metadata/admin1',
response_model=HapiGenericResponse[Admin1Response],
summary='Get the list of first-level subnational administrative divisions available in HAPI',
)
async def get_admin1(
+ ref_period_parameters: Annotated[ReferencePeriodParameters, Depends(reference_period_parameters)],
common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
db: AsyncSession = Depends(get_db),
code: Annotated[str, Query(max_length=128, description=f'{DOC_ADMIN1_CODE}')] = None,
name: Annotated[str, Query(max_length=512, description=f'{DOC_ADMIN1_NAME}')] = None,
- hapi_updated_date_min: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_UPDATED_DATE_MIN}'),
- ] = None,
- hapi_updated_date_max: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_UPDATED_DATE_MAX}'),
- ] = None,
- hapi_replaced_date_min: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_REPLACED_DATE_MIN}'),
- ] = None,
- hapi_replaced_date_max: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_REPLACED_DATE_MAX}'),
- ] = None,
+ # hapi_updated_date_min: Annotated[
+ # NaiveDatetime | date,
+ # Query(description=f'{DOC_HAPI_UPDATED_DATE_MIN}'),
+ # ] = None,
+ # hapi_updated_date_max: Annotated[
+ # NaiveDatetime | date,
+ # Query(description=f'{DOC_HAPI_UPDATED_DATE_MAX}'),
+ # ] = None,
+ # hapi_replaced_date_min: Annotated[
+ # NaiveDatetime | date,
+ # Query(description=f'{DOC_HAPI_REPLACED_DATE_MIN}'),
+ # ] = None,
+ # hapi_replaced_date_max: Annotated[
+ # NaiveDatetime | date,
+ # Query(description=f'{DOC_HAPI_REPLACED_DATE_MAX}'),
+ # ] = None,
location_code: Annotated[str, Query(max_length=128, description=f'{DOC_LOCATION_CODE} {DOC_SEE_LOC}')] = None,
location_name: Annotated[str, Query(max_length=512, description=f'{DOC_LOCATION_NAME} {DOC_SEE_LOC}')] = None,
output_format: OutputFormat = OutputFormat.JSON,
):
result = await get_admin1_srv(
pagination_parameters=common_parameters,
+ ref_period_parameters=ref_period_parameters,
db=db,
code=code,
name=name,
- hapi_updated_date_min=hapi_updated_date_min,
- hapi_updated_date_max=hapi_updated_date_max,
- hapi_replaced_date_min=hapi_replaced_date_min,
- hapi_replaced_date_max=hapi_replaced_date_max,
location_code=location_code,
location_name=location_name,
)
@@ -152,37 +127,38 @@ async def get_admin1(
@router.get(
- '/api/admin2',
+ '/api/metadata/admin2',
response_model=HapiGenericResponse[Admin2Response],
summary='Get the list of second-level administrative divisions available in HAPI',
include_in_schema=False,
)
@router.get(
- '/api/v1/admin2',
+ '/api/v1/metadata/admin2',
response_model=HapiGenericResponse[Admin2Response],
summary='Get the list of second-level administrative divisions available in HAPI',
)
async def get_admin2(
+ ref_period_parameters: Annotated[ReferencePeriodParameters, Depends(reference_period_parameters)],
common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
db: AsyncSession = Depends(get_db),
code: Annotated[str, Query(max_length=128, description=f'{DOC_ADMIN2_CODE}')] = None,
name: Annotated[str, Query(max_length=512, description=f'{DOC_ADMIN2_NAME}')] = None,
- hapi_updated_date_min: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_UPDATED_DATE_MIN}'),
- ] = None,
- hapi_updated_date_max: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_UPDATED_DATE_MAX}'),
- ] = None,
- hapi_replaced_date_min: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_REPLACED_DATE_MIN}'),
- ] = None,
- hapi_replaced_date_max: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_REPLACED_DATE_MAX}'),
- ] = None,
+ # hapi_updated_date_min: Annotated[
+ # NaiveDatetime | date,
+ # Query(description=f'{DOC_HAPI_UPDATED_DATE_MIN}'),
+ # ] = None,
+ # hapi_updated_date_max: Annotated[
+ # NaiveDatetime | date,
+ # Query(description=f'{DOC_HAPI_UPDATED_DATE_MAX}'),
+ # ] = None,
+ # hapi_replaced_date_min: Annotated[
+ # NaiveDatetime | date,
+ # Query(description=f'{DOC_HAPI_REPLACED_DATE_MIN}'),
+ # ] = None,
+ # hapi_replaced_date_max: Annotated[
+ # NaiveDatetime | date,
+ # Query(description=f'{DOC_HAPI_REPLACED_DATE_MAX}'),
+ # ] = None,
admin1_code: Annotated[str, Query(max_length=128, description=f'{DOC_ADMIN1_CODE} {DOC_SEE_ADMIN1}')] = None,
admin1_name: Annotated[str, Query(max_length=512, description=f'{DOC_ADMIN1_NAME} {DOC_SEE_ADMIN1}')] = None,
location_code: Annotated[str, Query(max_length=128, description=f'{DOC_LOCATION_CODE} {DOC_SEE_LOC}')] = None,
@@ -191,13 +167,10 @@ async def get_admin2(
):
result = await get_admin2_srv(
pagination_parameters=common_parameters,
+ ref_period_parameters=ref_period_parameters,
db=db,
code=code,
name=name,
- hapi_updated_date_min=hapi_updated_date_min,
- hapi_updated_date_max=hapi_updated_date_max,
- hapi_replaced_date_min=hapi_replaced_date_min,
- hapi_replaced_date_max=hapi_replaced_date_max,
admin1_code=admin1_code,
admin1_name=admin1_name,
location_code=location_code,
diff --git a/hdx_hapi/endpoints/get_affected_people.py b/hdx_hapi/endpoints/get_affected_people.py
new file mode 100644
index 00000000..d2dc6a55
--- /dev/null
+++ b/hdx_hapi/endpoints/get_affected_people.py
@@ -0,0 +1,185 @@
+from typing import Annotated, Optional
+from fastapi import Depends, Query, APIRouter
+# from pydantic import NaiveDatetime
+
+
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from hdx_hapi.config.doc_snippets import (
+ DOC_GENDER,
+ DOC_AGE_RANGE,
+ DOC_SECTOR_CODE,
+ DOC_SECTOR_NAME,
+ DOC_ADMIN1_CODE,
+ DOC_ADMIN2_NAME,
+ DOC_ADMIN2_CODE,
+ DOC_LOCATION_CODE,
+ DOC_LOCATION_NAME,
+ DOC_SEE_ADMIN1,
+ DOC_SEE_LOC,
+ DOC_SEE_ADMIN2,
+)
+
+from hdx_hapi.endpoints.models.base import HapiGenericResponse
+from hdx_hapi.endpoints.models.humanitarian_needs import HumanitarianNeedsResponse
+from hdx_hapi.endpoints.models.refugees import RefugeesResponse
+from hdx_hapi.services.csv_transform_logic import transform_result_to_csv_stream_if_requested
+from hdx_hapi.services.humanitarian_needs_logic import get_humanitarian_needs_srv
+from hdx_hapi.services.refugees_logic import get_refugees_srv
+from hdx_hapi.services.sql_alchemy_session import get_db
+from hapi_schema.utils.enums import DisabledMarker, Gender, PopulationGroup, PopulationStatus
+from hdx_hapi.endpoints.util.util import (
+ CommonEndpointParams,
+ OutputFormat,
+ ReferencePeriodParameters,
+ common_endpoint_parameters,
+ reference_period_parameters,
+ AdminLevel,
+)
+
+router = APIRouter(
+ tags=['Affected people'],
+)
+
+
+@router.get(
+ '/api/affected-people/humanitarian-needs',
+ response_model=HapiGenericResponse[HumanitarianNeedsResponse],
+ summary='Get humanitarian needs data',
+ include_in_schema=False,
+)
+@router.get(
+ '/api/v1/affected-people/humanitarian-needs',
+ response_model=HapiGenericResponse[HumanitarianNeedsResponse],
+ summary='Get humanitarian needs data',
+)
+async def get_humanitarian_needs(
+ ref_period_parameters: Annotated[ReferencePeriodParameters, Depends(reference_period_parameters)],
+ common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
+ db: AsyncSession = Depends(get_db),
+ admin2_ref: Annotated[Optional[int], Query(description='Admin2 reference')] = None,
+ gender: Annotated[Optional[Gender], Query(max_length=1, description=f'{DOC_GENDER}')] = None,
+ age_range: Annotated[Optional[str], Query(max_length=32, description=f'{DOC_AGE_RANGE}')] = None,
+ min_age: Annotated[Optional[int], Query(description='Min age')] = None,
+ max_age: Annotated[Optional[int], Query(description='Max age')] = None,
+ disabled_marker: Annotated[Optional[DisabledMarker], Query(description='Disabled marker')] = None,
+ sector_code: Annotated[Optional[str], Query(max_length=32, description=f'{DOC_SECTOR_CODE}')] = None,
+ population_group: Annotated[Optional[PopulationGroup], Query(max_length=32, description='Population group')] = None,
+ population_status: Annotated[
+ Optional[PopulationStatus], Query(max_length=32, description='Population status')
+ ] = None,
+ population: Annotated[Optional[int], Query(description='Population')] = None,
+ # reference_period_start: Annotated[
+ # NaiveDatetime | date,
+ # Query(description='Reference period start', openapi_examples={'2020-01-01': {'value': '2020-01-01'}}),
+ # ] = None,
+ # reference_period_end: Annotated[
+ # NaiveDatetime | date,
+ # Query(description='Reference period end', openapi_examples={'2024-12-31': {'value': '2024-12-31'}}),
+ # ] = None,
+ sector_name: Annotated[Optional[str], Query(max_length=512, description=f'{DOC_SECTOR_NAME}')] = None,
+ location_code: Annotated[
+ Optional[str], Query(max_length=128, description=f'{DOC_LOCATION_CODE} {DOC_SEE_LOC}')
+ ] = None,
+ location_name: Annotated[
+ Optional[str], Query(max_length=512, description=f'{DOC_LOCATION_NAME} {DOC_SEE_LOC}')
+ ] = None,
+ location_ref: Annotated[Optional[int], Query(description='Location reference')] = None,
+ admin1_code: Annotated[
+ Optional[str], Query(max_length=128, description=f'{DOC_ADMIN1_CODE} {DOC_SEE_ADMIN1}')
+ ] = None,
+ admin2_code: Annotated[
+ Optional[str], Query(max_length=128, description=f'{DOC_ADMIN2_CODE} {DOC_SEE_ADMIN2}')
+ ] = None,
+ admin2_name: Annotated[
+ Optional[str], Query(max_length=512, description=f'{DOC_ADMIN2_NAME} {DOC_SEE_ADMIN2}')
+ ] = None,
+ admin1_ref: Annotated[Optional[int], Query(description='Admin1 reference')] = None,
+ admin_level: Annotated[Optional[AdminLevel], Query(description='Filter the response by admin level')] = None,
+ output_format: OutputFormat = OutputFormat.JSON,
+):
+ """
+ Return the list of humanitarian needs data
+ """
+ result = await get_humanitarian_needs_srv(
+ pagination_parameters=common_parameters,
+ ref_period_parameters=ref_period_parameters,
+ db=db,
+ admin2_ref=admin2_ref,
+ gender=gender,
+ age_range=age_range,
+ min_age=min_age,
+ max_age=max_age,
+ disabled_marker=disabled_marker,
+ sector_code=sector_code,
+ population_group=population_group,
+ population_status=population_status,
+ population=population,
+ sector_name=sector_name,
+ location_code=location_code,
+ location_name=location_name,
+ location_ref=location_ref,
+ admin1_code=admin1_code,
+ admin2_code=admin2_code,
+ admin2_name=admin2_name,
+ admin1_ref=admin1_ref,
+ admin_level=admin_level,
+ )
+ return transform_result_to_csv_stream_if_requested(result, output_format, HumanitarianNeedsResponse)
+
+
+# Refugees endpoints
+
+
+@router.get(
+ '/api/affected-people/refugees',
+ response_model=HapiGenericResponse[RefugeesResponse],
+ summary='Get refugees data',
+ include_in_schema=False,
+)
+@router.get(
+ '/api/v1/affected-people/refugees',
+ response_model=HapiGenericResponse[RefugeesResponse],
+ summary='Get refugees data',
+)
+async def get_refugees(
+ ref_period_parameters: Annotated[ReferencePeriodParameters, Depends(reference_period_parameters)],
+ common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
+ db: AsyncSession = Depends(get_db),
+ population_group: Annotated[Optional[PopulationGroup], Query(max_length=32, description='Population group')] = None,
+ gender: Annotated[Optional[Gender], Query(max_length=1, description=f'{DOC_GENDER}')] = None,
+ age_range: Annotated[Optional[str], Query(max_length=32, description=f'{DOC_AGE_RANGE}')] = None,
+ min_age: Annotated[Optional[int], Query(description='Min age')] = None,
+ max_age: Annotated[Optional[int], Query(description='Max age')] = None,
+ origin_location_code: Annotated[
+ Optional[str], Query(max_length=128, description=f'{DOC_LOCATION_CODE} {DOC_SEE_LOC}')
+ ] = None,
+ origin_location_name: Annotated[
+ Optional[str], Query(max_length=512, description=f'{DOC_LOCATION_NAME} {DOC_SEE_LOC}')
+ ] = None,
+ asylum_location_code: Annotated[
+ Optional[str], Query(max_length=128, description=f'{DOC_LOCATION_CODE} {DOC_SEE_LOC}')
+ ] = None,
+ asylum_location_name: Annotated[
+ Optional[str], Query(max_length=512, description=f'{DOC_LOCATION_NAME} {DOC_SEE_LOC}')
+ ] = None,
+ output_format: OutputFormat = OutputFormat.JSON,
+):
+ """
+ Return the list of refugees data
+ """
+ result = await get_refugees_srv(
+ pagination_parameters=common_parameters,
+ ref_period_parameters=ref_period_parameters,
+ db=db,
+ population_group=population_group,
+ gender=gender,
+ age_range=age_range,
+ min_age=min_age,
+ max_age=max_age,
+ origin_location_code=origin_location_code,
+ origin_location_name=origin_location_name,
+ asylum_location_code=asylum_location_code,
+ asylum_location_name=asylum_location_name,
+ )
+ return transform_result_to_csv_stream_if_requested(result, output_format, RefugeesResponse)
diff --git a/hdx_hapi/endpoints/get_conflict_events.py b/hdx_hapi/endpoints/get_conflict_events.py
new file mode 100644
index 00000000..27292b9f
--- /dev/null
+++ b/hdx_hapi/endpoints/get_conflict_events.py
@@ -0,0 +1,96 @@
+from typing import Annotated, Optional
+from fastapi import APIRouter, Depends, Query
+
+from hapi_schema.utils.enums import EventType
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from hdx_hapi.config.doc_snippets import (
+ DOC_ADMIN1_CODE,
+ DOC_ADMIN1_NAME,
+ DOC_ADMIN2_CODE,
+ DOC_ADMIN2_NAME,
+ DOC_LOCATION_CODE,
+ DOC_LOCATION_NAME,
+ DOC_SEE_ADMIN1,
+ DOC_SEE_ADMIN2,
+ DOC_SEE_LOC,
+)
+from hdx_hapi.endpoints.models.base import HapiGenericResponse
+from hdx_hapi.endpoints.models.conflict_event import ConflictEventResponse
+from hdx_hapi.endpoints.util.util import (
+ AdminLevel,
+ CommonEndpointParams,
+ OutputFormat,
+ ReferencePeriodParameters,
+ common_endpoint_parameters,
+ reference_period_parameters,
+)
+from hdx_hapi.services.conflict_view_logic import get_conflict_event_srv
+from hdx_hapi.services.csv_transform_logic import transform_result_to_csv_stream_if_requested
+from hdx_hapi.services.sql_alchemy_session import get_db
+
+
+router = APIRouter(
+ tags=['Conflict Events'],
+)
+
+SUMMARY_TEXT = 'Get the list of conflict events'
+
+
+@router.get(
+ '/api/coordination-context/conflict-event',
+ response_model=HapiGenericResponse[ConflictEventResponse],
+ summary=SUMMARY_TEXT,
+ include_in_schema=False,
+)
+@router.get(
+ '/api/v1/coordination-context/conflict-event',
+ response_model=HapiGenericResponse[ConflictEventResponse],
+ summary=SUMMARY_TEXT,
+)
+async def get_conflict_events(
+ ref_period_parameters: Annotated[ReferencePeriodParameters, Depends(reference_period_parameters)],
+ common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
+ db: AsyncSession = Depends(get_db),
+ event_type: Annotated[Optional[EventType], Query(description='Event type')] = None,
+ location_ref: Annotated[Optional[int], Query(description='Location reference')] = None,
+ location_code: Annotated[
+ Optional[str], Query(max_length=128, description=f'{DOC_LOCATION_CODE} {DOC_SEE_LOC}')
+ ] = None,
+ location_name: Annotated[
+ Optional[str], Query(max_length=512, description=f'{DOC_LOCATION_NAME} {DOC_SEE_LOC}')
+ ] = None,
+ admin1_ref: Annotated[Optional[int], Query(description='Admin1 reference')] = None,
+ admin1_code: Annotated[
+ Optional[str], Query(max_length=128, description=f'{DOC_ADMIN1_CODE} {DOC_SEE_ADMIN1}')
+ ] = None,
+ admin1_name: Annotated[
+ Optional[str], Query(max_length=512, description=f'{DOC_ADMIN1_NAME} {DOC_SEE_ADMIN1}')
+ ] = None,
+ admin2_ref: Annotated[Optional[int], Query(description='Admin2 reference')] = None,
+ admin2_code: Annotated[
+ Optional[str], Query(max_length=128, description=f'{DOC_ADMIN2_CODE} {DOC_SEE_ADMIN2}')
+ ] = None,
+ admin2_name: Annotated[
+ Optional[str], Query(max_length=512, description=f'{DOC_ADMIN2_NAME} {DOC_SEE_ADMIN2}')
+ ] = None,
+ admin_level: Annotated[AdminLevel, Query(description='Filter the response by admin level')] = None,
+ output_format: OutputFormat = OutputFormat.JSON,
+):
+ result = await get_conflict_event_srv(
+ pagination_parameters=common_parameters,
+ ref_period_parameters=ref_period_parameters,
+ db=db,
+ event_type=event_type,
+ location_ref=location_ref,
+ location_code=location_code,
+ location_name=location_name,
+ admin1_ref=admin1_ref,
+ admin1_code=admin1_code,
+ admin1_name=admin1_name,
+ admin2_ref=admin2_ref,
+ admin2_code=admin2_code,
+ admin2_name=admin2_name,
+ admin_level=admin_level,
+ )
+ return transform_result_to_csv_stream_if_requested(result, output_format, ConflictEventResponse)
diff --git a/hdx_hapi/endpoints/get_demographic.py b/hdx_hapi/endpoints/get_demographic.py
index e4f63893..e69de29b 100644
--- a/hdx_hapi/endpoints/get_demographic.py
+++ b/hdx_hapi/endpoints/get_demographic.py
@@ -1,80 +0,0 @@
-from typing import Annotated
-from fastapi import Depends, Query, APIRouter
-
-
-from sqlalchemy.ext.asyncio import AsyncSession
-from hdx_hapi.config.doc_snippets import (
- DOC_AGE_RANGE_CODE,
- DOC_AGE_RANGE_SUMMARY,
- DOC_GENDER_CODE,
- DOC_GENDER_DESCRIPTION,
- DOC_GENDER_SUMMARY,
-)
-
-from hdx_hapi.endpoints.models.base import HapiGenericResponse
-from hdx_hapi.endpoints.models.demographic import AgeRangeResponse, GenderResponse
-from hdx_hapi.endpoints.util.util import CommonEndpointParams, common_endpoint_parameters, OutputFormat
-from hdx_hapi.services.age_range_logic import get_age_ranges_srv
-from hdx_hapi.services.csv_transform_logic import transform_result_to_csv_stream_if_requested
-from hdx_hapi.services.gender_logic import get_genders_srv
-from hdx_hapi.services.sql_alchemy_session import get_db
-
-
-router = APIRouter(
- tags=['Age and Gender Disaggregations'],
-)
-
-
-@router.get(
- '/api/age_range',
- response_model=HapiGenericResponse[AgeRangeResponse],
- summary=f'{DOC_AGE_RANGE_SUMMARY}',
- include_in_schema=False,
-)
-@router.get(
- '/api/v1/age_range', response_model=HapiGenericResponse[AgeRangeResponse], summary=f'{DOC_AGE_RANGE_SUMMARY}'
-)
-async def get_age_ranges(
- common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
- db: AsyncSession = Depends(get_db),
- code: Annotated[
- str, Query(max_length=32, openapi_examples={'20-24': {'value': '20-24'}}, description=f'{DOC_AGE_RANGE_CODE}')
- ] = None,
- output_format: OutputFormat = OutputFormat.JSON,
-):
- """Get the list of age ranges used for disaggregating population data. Age ranges are not standardized across
- different data sources and instead reflect the age range breakdowns provided by the data source.
- """
- result = await get_age_ranges_srv(
- pagination_parameters=common_parameters,
- db=db,
- code=code,
- )
-
- return transform_result_to_csv_stream_if_requested(result, output_format, AgeRangeResponse)
-
-
-@router.get(
- '/api/gender',
- response_model=HapiGenericResponse[GenderResponse],
- summary=f'{DOC_GENDER_SUMMARY}',
- include_in_schema=False,
-)
-@router.get('/api/v1/gender', response_model=HapiGenericResponse[GenderResponse], summary=f'{DOC_GENDER_SUMMARY}')
-async def get_genders(
- common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
- db: AsyncSession = Depends(get_db),
- code: Annotated[
- str, Query(max_length=1, description=f'{DOC_GENDER_CODE}', openapi_examples={'f': {'value': 'f'}})
- ] = None,
- description: Annotated[
- str,
- Query(
- max_length=256, description=f'{DOC_GENDER_DESCRIPTION}', openapi_examples={'female': {'value': 'female'}}
- ),
- ] = None,
- output_format: OutputFormat = OutputFormat.JSON,
-):
- """ """
- result = await get_genders_srv(pagination_parameters=common_parameters, db=db, code=code, description=description)
- return transform_result_to_csv_stream_if_requested(result, output_format, GenderResponse)
diff --git a/hdx_hapi/endpoints/get_encoded_identifier.py b/hdx_hapi/endpoints/get_encoded_identifier.py
index bd759045..b6497e9f 100644
--- a/hdx_hapi/endpoints/get_encoded_identifier.py
+++ b/hdx_hapi/endpoints/get_encoded_identifier.py
@@ -7,20 +7,20 @@
from hdx_hapi.endpoints.util.util import app_name_identifier_query, email_identifier_query
router = APIRouter(
- tags=['Utility'],
+ tags=['Generate App Identifier'],
)
SUMMARY = 'Get an encoded application name plus email'
@router.get(
- '/api/encode_identifier',
+ '/api/encode_app_identifier',
response_model=IdentifierResponse,
summary=SUMMARY,
include_in_schema=False,
)
@router.get(
- '/api/v1/encode_identifier',
+ '/api/v1/encode_app_identifier',
response_model=IdentifierResponse,
summary=SUMMARY,
)
@@ -33,6 +33,5 @@ async def get_encoded_identifier(
"""
encoded_identifier = base64.b64encode(bytes(f'{application}:{email}', 'utf-8'))
- result = {'encoded_identifier': encoded_identifier.decode('utf-8')}
+ result = {'encoded_app_identifier': encoded_identifier.decode('utf-8')}
return result
- # return transform_result_to_csv_stream_if_requested(result, OutputFormat.JSON, IdentifierResponse)
diff --git a/hdx_hapi/endpoints/get_funding.py b/hdx_hapi/endpoints/get_funding.py
new file mode 100644
index 00000000..e8b13541
--- /dev/null
+++ b/hdx_hapi/endpoints/get_funding.py
@@ -0,0 +1,60 @@
+from typing import Annotated, Optional
+from fastapi import APIRouter, Depends, Query
+
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from hdx_hapi.config.doc_snippets import DOC_LOCATION_CODE, DOC_LOCATION_NAME, DOC_SEE_LOC
+from hdx_hapi.endpoints.models.base import HapiGenericResponse
+from hdx_hapi.endpoints.models.funding import FundingResponse
+from hdx_hapi.endpoints.util.util import (
+ CommonEndpointParams,
+ OutputFormat,
+ ReferencePeriodParameters,
+ common_endpoint_parameters,
+ reference_period_parameters,
+)
+from hdx_hapi.services.csv_transform_logic import transform_result_to_csv_stream_if_requested
+from hdx_hapi.services.funding_logic import get_funding_srv
+from hdx_hapi.services.sql_alchemy_session import get_db
+
+
+router = APIRouter(
+ tags=['Funding'],
+)
+
+
+@router.get(
+ '/api/coordination-context/funding',
+ response_model=HapiGenericResponse[FundingResponse],
+ summary='Funding endpoint',
+ include_in_schema=False,
+)
+@router.get(
+ '/api/v1/coordination-context/funding',
+ response_model=HapiGenericResponse[FundingResponse],
+ summary='Funding endpoint',
+)
+async def get_fundings(
+ ref_period_parameters: Annotated[ReferencePeriodParameters, Depends(reference_period_parameters)],
+ common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
+ db: AsyncSession = Depends(get_db),
+ appeal_code: Annotated[Optional[str], Query(max_length=32, description='Appeal code')] = None,
+ appeal_type: Annotated[Optional[str], Query(max_length=32, description='Appeal type')] = None,
+ location_code: Annotated[
+ Optional[str], Query(max_length=128, description=f'{DOC_LOCATION_CODE} {DOC_SEE_LOC}')
+ ] = None,
+ location_name: Annotated[
+ Optional[str], Query(max_length=512, description=f'{DOC_LOCATION_NAME} {DOC_SEE_LOC}')
+ ] = None,
+ output_format: OutputFormat = OutputFormat.JSON,
+):
+ result = await get_funding_srv(
+ pagination_parameters=common_parameters,
+ ref_period_parameters=ref_period_parameters,
+ db=db,
+ appeal_code=appeal_code,
+ appeal_type=appeal_type,
+ location_code=location_code,
+ location_name=location_name,
+ )
+ return transform_result_to_csv_stream_if_requested(result, output_format, FundingResponse)
diff --git a/hdx_hapi/endpoints/get_hdx_metadata.py b/hdx_hapi/endpoints/get_hdx_metadata.py
index 48a9108d..a5fd0359 100644
--- a/hdx_hapi/endpoints/get_hdx_metadata.py
+++ b/hdx_hapi/endpoints/get_hdx_metadata.py
@@ -18,10 +18,6 @@
DOC_SEE_DATASET,
DOC_UPDATE_DATE_MAX,
DOC_UPDATE_DATE_MIN,
- DOC_HAPI_UPDATED_DATE_MIN,
- DOC_HAPI_UPDATED_DATE_MAX,
- DOC_HAPI_REPLACED_DATE_MIN,
- DOC_HAPI_REPLACED_DATE_MAX,
)
from hdx_hapi.endpoints.models.base import HapiGenericResponse
@@ -42,13 +38,13 @@
@router.get(
- '/api/dataset',
+ '/api/metadata/dataset',
response_model=HapiGenericResponse[DatasetResponse],
summary='Get information about the sources of the data in HAPI',
include_in_schema=False,
)
@router.get(
- '/api/v1/dataset',
+ '/api/v1/metadata/dataset',
response_model=HapiGenericResponse[DatasetResponse],
summary='Get information about the sources of the data in HAPI',
)
@@ -79,13 +75,13 @@ async def get_datasets(
@router.get(
- '/api/resource',
+ '/api/metadata/resource',
response_model=HapiGenericResponse[ResourceResponse],
summary='Get information about the sources of the data in HAPI',
include_in_schema=False,
)
@router.get(
- '/api/v1/resource',
+ '/api/v1/metadata/resource',
response_model=HapiGenericResponse[ResourceResponse],
summary='Get information about the sources of the data in HAPI',
)
@@ -103,22 +99,6 @@ async def get_resources(
Query(description=f'{DOC_UPDATE_DATE_MAX}', openapi_examples={'2024-12-31': {'value': '2024-12-31'}}),
] = None,
is_hxl: Annotated[bool, Query(description=f'{DOC_HDX_RESOURCE_HXL}')] = None,
- hapi_updated_date_min: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_UPDATED_DATE_MIN}'),
- ] = None,
- hapi_updated_date_max: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_UPDATED_DATE_MAX}'),
- ] = None,
- hapi_replaced_date_min: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_REPLACED_DATE_MIN}'),
- ] = None,
- hapi_replaced_date_max: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_REPLACED_DATE_MAX}'),
- ] = None,
dataset_hdx_id: Annotated[str, Query(max_length=36, description=f'{DOC_HDX_DATASET_ID} {DOC_SEE_DATASET} ')] = None,
dataset_hdx_stub: Annotated[
str, Query(max_length=128, description=f'{DOC_HDX_DATASET_NAME} {DOC_SEE_DATASET}')
@@ -142,10 +122,6 @@ async def get_resources(
update_date_min=update_date_min,
update_date_max=update_date_max,
is_hxl=is_hxl,
- hapi_updated_date_min=hapi_updated_date_min,
- hapi_updated_date_max=hapi_updated_date_max,
- hapi_replaced_date_min=hapi_replaced_date_min,
- hapi_replaced_date_max=hapi_replaced_date_max,
dataset_hdx_id=dataset_hdx_id,
dataset_hdx_stub=dataset_hdx_stub,
dataset_title=dataset_title,
diff --git a/hdx_hapi/endpoints/get_humanitarian_needs.py b/hdx_hapi/endpoints/get_humanitarian_needs.py
deleted file mode 100644
index e4eda016..00000000
--- a/hdx_hapi/endpoints/get_humanitarian_needs.py
+++ /dev/null
@@ -1,132 +0,0 @@
-from datetime import date
-from typing import Annotated
-from fastapi import Depends, Query, APIRouter
-from pydantic import NaiveDatetime
-
-
-from sqlalchemy.ext.asyncio import AsyncSession
-
-from hdx_hapi.config.doc_snippets import (
- DOC_GENDER_CODE,
- DOC_AGE_RANGE_CODE,
- DOC_SECTOR_CODE,
- DOC_SECTOR_NAME,
- DOC_HDX_PROVIDER_STUB,
- DOC_ADMIN1_CODE,
- DOC_ADMIN2_NAME,
- DOC_ADMIN2_CODE,
- DOC_LOCATION_CODE,
- DOC_LOCATION_NAME,
- DOC_SEE_ADMIN1,
- DOC_SEE_LOC,
- DOC_UPDATE_DATE_MAX,
- DOC_UPDATE_DATE_MIN,
- DOC_SEE_ADMIN2,
- DOC_HAPI_UPDATED_DATE_MIN,
- DOC_HAPI_UPDATED_DATE_MAX,
- DOC_HAPI_REPLACED_DATE_MIN,
- DOC_HAPI_REPLACED_DATE_MAX,
-)
-
-from hdx_hapi.endpoints.models.base import HapiGenericResponse
-from hdx_hapi.endpoints.models.humanitarian_needs import HumanitarianNeedsResponse
-from hdx_hapi.endpoints.util.util import AdminLevel, CommonEndpointParams, OutputFormat, common_endpoint_parameters
-from hdx_hapi.services.csv_transform_logic import transform_result_to_csv_stream_if_requested
-from hdx_hapi.services.humanitarian_needs_logic import get_humanitarian_needs_srv
-from hdx_hapi.services.sql_alchemy_session import get_db
-
-router = APIRouter(
- tags=['Humanitarian Needs'],
-)
-
-
-@router.get(
- '/api/themes/humanitarian_needs',
- response_model=HapiGenericResponse[HumanitarianNeedsResponse],
- summary='Get humanitarian needs data',
- include_in_schema=False,
-)
-@router.get(
- '/api/v1/themes/humanitarian_needs',
- response_model=HapiGenericResponse[HumanitarianNeedsResponse],
- summary='Get humanitarian needs data',
-)
-async def get_humanitarian_needs(
- common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
- db: AsyncSession = Depends(get_db),
- gender_code: Annotated[str, Query(max_length=1, description=f'{DOC_GENDER_CODE}')] = None,
- age_range_code: Annotated[str, Query(max_length=32, description=f'{DOC_AGE_RANGE_CODE}')] = None,
- disabled_marker: Annotated[bool, Query(description='Disabled marker')] = None,
- sector_code: Annotated[str, Query(max_length=32, description=f'{DOC_SECTOR_CODE}')] = None,
- sector_name: Annotated[str, Query(max_length=512, description=f'{DOC_SECTOR_NAME}')] = None,
- population_group_code: Annotated[str, Query(max_length=32, description='Population group code')] = None,
- population_status_code: Annotated[str, Query(max_length=32, description='Population status code')] = None,
- population: Annotated[int, Query(description='Population')] = None,
- dataset_hdx_provider_stub: Annotated[str, Query(max_length=128, description=f'{DOC_HDX_PROVIDER_STUB}')] = None,
- resource_update_date_min: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_UPDATE_DATE_MIN}', openapi_examples={'2020-01-01': {'value': '2020-01-01'}}),
- ] = None,
- resource_update_date_max: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_UPDATE_DATE_MAX}', openapi_examples={'2024-12-31': {'value': '2024-12-31'}}),
- ] = None,
- hapi_updated_date_min: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_UPDATED_DATE_MIN}'),
- ] = None,
- hapi_updated_date_max: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_UPDATED_DATE_MAX}'),
- ] = None,
- hapi_replaced_date_min: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_REPLACED_DATE_MIN}'),
- ] = None,
- hapi_replaced_date_max: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_REPLACED_DATE_MAX}'),
- ] = None,
- location_code: Annotated[str, Query(max_length=128, description=f'{DOC_LOCATION_CODE} {DOC_SEE_LOC}')] = None,
- location_name: Annotated[str, Query(max_length=512, description=f'{DOC_LOCATION_NAME} {DOC_SEE_LOC}')] = None,
- admin1_code: Annotated[str, Query(max_length=128, description=f'{DOC_ADMIN1_CODE} {DOC_SEE_ADMIN1}')] = None,
- location_ref: Annotated[int, Query(description='Location reference')] = None,
- # admin1_name: Annotated[str, Query(max_length=512, description=f'{DOC_ADMIN1_NAME} {DOC_SEE_ADMIN1}')] = None,
- admin2_code: Annotated[str, Query(max_length=128, description=f'{DOC_ADMIN2_CODE} {DOC_SEE_ADMIN2}')] = None,
- admin2_name: Annotated[str, Query(max_length=512, description=f'{DOC_ADMIN2_NAME} {DOC_SEE_ADMIN2}')] = None,
- admin1_ref: Annotated[int, Query(description='Admin1 reference')] = None,
- admin_level: Annotated[AdminLevel, Query(description='Filter the response by admin level')] = None,
- output_format: OutputFormat = OutputFormat.JSON,
-):
- """
- Return the list of humanitarian needs data
- """
- result = await get_humanitarian_needs_srv(
- pagination_parameters=common_parameters,
- db=db,
- gender_code=gender_code,
- age_range_code=age_range_code,
- disabled_marker=disabled_marker,
- sector_code=sector_code,
- sector_name=sector_name,
- population_group_code=population_group_code,
- population_status_code=population_status_code,
- population=population,
- dataset_hdx_provider_stub=dataset_hdx_provider_stub,
- resource_update_date_min=resource_update_date_min,
- resource_update_date_max=resource_update_date_max,
- hapi_updated_date_min=hapi_updated_date_min,
- hapi_updated_date_max=hapi_updated_date_max,
- hapi_replaced_date_min=hapi_replaced_date_min,
- hapi_replaced_date_max=hapi_replaced_date_max,
- location_code=location_code,
- location_name=location_name,
- admin1_code=admin1_code,
- # admin1_name=admin1_name,
- location_ref=location_ref,
- admin2_code=admin2_code,
- admin2_name=admin2_name,
- admin1_ref=admin1_ref,
- admin_level=admin_level,
- )
- return transform_result_to_csv_stream_if_requested(result, output_format, HumanitarianNeedsResponse)
diff --git a/hdx_hapi/endpoints/get_humanitarian_response.py b/hdx_hapi/endpoints/get_humanitarian_response.py
index ca7e94a9..15a5840c 100644
--- a/hdx_hapi/endpoints/get_humanitarian_response.py
+++ b/hdx_hapi/endpoints/get_humanitarian_response.py
@@ -21,6 +21,7 @@
common_endpoint_parameters,
)
from hdx_hapi.services.csv_transform_logic import transform_result_to_csv_stream_if_requested
+
from hdx_hapi.services.org_logic import get_orgs_srv
from hdx_hapi.services.org_type_logic import get_org_types_srv
from hdx_hapi.services.sector_logic import get_sectors_srv
@@ -33,13 +34,13 @@
@router.get(
- '/api/org',
+ '/api/metadata/org',
response_model=HapiGenericResponse[OrgResponse],
summary='Get the list of organizations represented in the data available in HAPI',
include_in_schema=False,
)
@router.get(
- '/api/v1/org',
+ '/api/v1/metadata/org',
response_model=HapiGenericResponse[OrgResponse],
summary='Get the list of organizations represented in the data available in HAPI',
)
@@ -80,13 +81,13 @@ async def get_orgs(
@router.get(
- '/api/org_type',
+ '/api/metadata/org_type',
response_model=HapiGenericResponse[OrgTypeResponse],
summary='Get information about how organizations are classified in HAPI',
include_in_schema=False,
)
@router.get(
- '/api/v1/org_type',
+ '/api/v1/metadata/org_type',
response_model=HapiGenericResponse[OrgTypeResponse],
summary='Get information about how organizations are classified in HAPI',
)
@@ -112,13 +113,13 @@ async def get_org_types(
@router.get(
- '/api/sector',
+ '/api/metadata/sector',
response_model=HapiGenericResponse[SectorResponse],
summary='Get information about how humanitarian response activities are classified',
include_in_schema=False,
)
@router.get(
- '/api/v1/sector',
+ '/api/v1/metadata/sector',
response_model=HapiGenericResponse[SectorResponse],
summary='Get information about how humanitarian response activities are classified',
)
diff --git a/hdx_hapi/endpoints/get_national_risk.py b/hdx_hapi/endpoints/get_national_risk.py
index 03d80d1d..c89f82bd 100644
--- a/hdx_hapi/endpoints/get_national_risk.py
+++ b/hdx_hapi/endpoints/get_national_risk.py
@@ -1,27 +1,25 @@
-from datetime import date
-from typing import Annotated
+from typing import Annotated, Optional
from fastapi import Depends, Query, APIRouter
-from pydantic import NaiveDatetime
+from hapi_schema.utils.enums import RiskClass
from sqlalchemy.ext.asyncio import AsyncSession
from hdx_hapi.config.doc_snippets import (
- DOC_HDX_PROVIDER_STUB,
DOC_LOCATION_CODE,
DOC_LOCATION_NAME,
DOC_SEE_LOC,
- DOC_UPDATE_DATE_MAX,
- DOC_UPDATE_DATE_MIN,
- DOC_HAPI_UPDATED_DATE_MIN,
- DOC_HAPI_UPDATED_DATE_MAX,
- DOC_HAPI_REPLACED_DATE_MIN,
- DOC_HAPI_REPLACED_DATE_MAX,
)
from hdx_hapi.endpoints.models.base import HapiGenericResponse
from hdx_hapi.endpoints.models.national_risk import NationalRiskResponse
-from hdx_hapi.endpoints.util.util import CommonEndpointParams, OutputFormat, common_endpoint_parameters
+from hdx_hapi.endpoints.util.util import (
+ CommonEndpointParams,
+ OutputFormat,
+ ReferencePeriodParameters,
+ common_endpoint_parameters,
+ reference_period_parameters,
+)
from hdx_hapi.services.csv_transform_logic import transform_result_to_csv_stream_if_requested
from hdx_hapi.services.national_risk_logic import get_national_risks_srv
from hdx_hapi.services.sql_alchemy_session import get_db
@@ -32,53 +30,37 @@
@router.get(
- '/api/themes/national_risk',
+ '/api/coordination-context/national-risk',
response_model=HapiGenericResponse[NationalRiskResponse],
summary='Get national risk data',
include_in_schema=False,
)
@router.get(
- '/api/v1/themes/national_risk',
+ '/api/v1/coordination-context/national-risk',
response_model=HapiGenericResponse[NationalRiskResponse],
summary='Get national risk data',
)
async def get_national_risks(
+ ref_period_parameters: Annotated[ReferencePeriodParameters, Depends(reference_period_parameters)],
common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
db: AsyncSession = Depends(get_db),
- risk_class: Annotated[int, Query(description='Risk class')] = None,
- global_rank: Annotated[int, Query(description='Global rank')] = None,
- overall_risk: Annotated[float, Query(description='Overall risk')] = None,
- hazard_exposure_risk: Annotated[float, Query(description='Hazard exposure risk')] = None,
- vulnerability_risk: Annotated[float, Query(description='Vulnerability risk')] = None,
- coping_capacity_risk: Annotated[float, Query(description='Coping capacity risk')] = None,
- dataset_hdx_provider_stub: Annotated[str, Query(max_length=128, description=f'{DOC_HDX_PROVIDER_STUB}')] = None,
- resource_update_date_min: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_UPDATE_DATE_MIN}', openapi_examples={'2020-01-01': {'value': '2020-01-01'}}),
- ] = None,
- resource_update_date_max: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_UPDATE_DATE_MAX}', openapi_examples={'2024-12-31': {'value': '2024-12-31'}}),
- ] = None,
- hapi_updated_date_min: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_UPDATED_DATE_MIN}'),
- ] = None,
- hapi_updated_date_max: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_UPDATED_DATE_MAX}'),
- ] = None,
- hapi_replaced_date_min: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_REPLACED_DATE_MIN}'),
+ risk_class: Annotated[Optional[RiskClass], Query(description='Risk class')] = None,
+ global_rank_min: Annotated[Optional[int], Query(description='Global rank, lower bound')] = None,
+ global_rank_max: Annotated[Optional[int], Query(description='Global rank, upper bound')] = None,
+ overall_risk_min: Annotated[Optional[float], Query(description='Overall risk, lower bound')] = None,
+ overall_risk_max: Annotated[Optional[float], Query(description='Overall risk, upper bound')] = None,
+ hazard_exposure_risk_min: Annotated[Optional[float], Query(description='Hazard exposure risk, lower bound')] = None,
+ hazard_exposure_risk_max: Annotated[Optional[float], Query(description='Hazard exposure risk, upper bound')] = None,
+ vulnerability_risk_min: Annotated[Optional[float], Query(description='Vulnerability risk, lower bound')] = None,
+ vulnerability_risk_max: Annotated[Optional[float], Query(description='Vulnerability risk, upper bound')] = None,
+ coping_capacity_risk_min: Annotated[Optional[float], Query(description='Coping capacity risk, lower bound')] = None,
+ coping_capacity_risk_max: Annotated[Optional[float], Query(description='Coping capacity risk, upper bound')] = None,
+ location_code: Annotated[
+ Optional[str], Query(max_length=128, description=f'{DOC_LOCATION_CODE} {DOC_SEE_LOC}')
] = None,
- hapi_replaced_date_max: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_REPLACED_DATE_MAX}'),
+ location_name: Annotated[
+ Optional[str], Query(max_length=512, description=f'{DOC_LOCATION_NAME} {DOC_SEE_LOC}')
] = None,
- # sector_name: Annotated[str, Query(max_length=512, description=f'{DOC_SECTOR_NAME}')] = None,
- location_code: Annotated[str, Query(max_length=128, description=f'{DOC_LOCATION_CODE} {DOC_SEE_LOC}')] = None,
- location_name: Annotated[str, Query(max_length=512, description=f'{DOC_LOCATION_NAME} {DOC_SEE_LOC}')] = None,
output_format: OutputFormat = OutputFormat.JSON,
):
"""
@@ -86,21 +68,19 @@ async def get_national_risks(
"""
result = await get_national_risks_srv(
pagination_parameters=common_parameters,
+ ref_period_parameters=ref_period_parameters,
db=db,
risk_class=risk_class,
- global_rank=global_rank,
- overall_risk=overall_risk,
- hazard_exposure_risk=hazard_exposure_risk,
- vulnerability_risk=vulnerability_risk,
- coping_capacity_risk=coping_capacity_risk,
- dataset_hdx_provider_stub=dataset_hdx_provider_stub,
- resource_update_date_min=resource_update_date_min,
- resource_update_date_max=resource_update_date_max,
- hapi_updated_date_min=hapi_updated_date_min,
- hapi_updated_date_max=hapi_updated_date_max,
- hapi_replaced_date_min=hapi_replaced_date_min,
- hapi_replaced_date_max=hapi_replaced_date_max,
- # sector_name=sector_name,
+ global_rank_min=global_rank_min,
+ global_rank_max=global_rank_max,
+ overall_risk_min=overall_risk_min,
+ overall_risk_max=overall_risk_max,
+ hazard_exposure_risk_min=hazard_exposure_risk_min,
+ hazard_exposure_risk_max=hazard_exposure_risk_max,
+ vulnerability_risk_min=vulnerability_risk_min,
+ vulnerability_risk_max=vulnerability_risk_max,
+ coping_capacity_risk_min=coping_capacity_risk_min,
+ coping_capacity_risk_max=coping_capacity_risk_max,
location_code=location_code,
location_name=location_name,
)
diff --git a/hdx_hapi/endpoints/get_operational_presence.py b/hdx_hapi/endpoints/get_operational_presence.py
index 25892820..c743e704 100644
--- a/hdx_hapi/endpoints/get_operational_presence.py
+++ b/hdx_hapi/endpoints/get_operational_presence.py
@@ -1,7 +1,5 @@
-from datetime import date
from typing import Annotated
from fastapi import Depends, Query, APIRouter
-from pydantic import NaiveDatetime
from sqlalchemy.ext.asyncio import AsyncSession
from hdx_hapi.config.doc_snippets import (
@@ -14,15 +12,22 @@
DOC_SEE_ADMIN1,
DOC_SEE_ADMIN2,
DOC_SEE_LOC,
- DOC_HAPI_UPDATED_DATE_MIN,
- DOC_HAPI_UPDATED_DATE_MAX,
- DOC_HAPI_REPLACED_DATE_MIN,
- DOC_HAPI_REPLACED_DATE_MAX,
+ # DOC_HAPI_UPDATED_DATE_MIN,
+ # DOC_HAPI_UPDATED_DATE_MAX,
+ # DOC_HAPI_REPLACED_DATE_MIN,
+ # DOC_HAPI_REPLACED_DATE_MAX,
)
from hdx_hapi.endpoints.models.base import HapiGenericResponse
from hdx_hapi.endpoints.models.operational_presence import OperationalPresenceResponse
-from hdx_hapi.endpoints.util.util import AdminLevel, CommonEndpointParams, OutputFormat, common_endpoint_parameters
+from hdx_hapi.endpoints.util.util import (
+ AdminLevel,
+ CommonEndpointParams,
+ OutputFormat,
+ ReferencePeriodParameters,
+ common_endpoint_parameters,
+ reference_period_parameters,
+)
from hdx_hapi.services.csv_transform_logic import transform_result_to_csv_stream_if_requested
from hdx_hapi.services.operational_presence_logic import get_operational_presences_srv
from hdx_hapi.services.sql_alchemy_session import get_db
@@ -31,36 +36,22 @@
tags=['3W Operational Presence'],
)
-SUMMARY_TEXT = (
- 'Get the list of organizations present and in which humanitarian sectors they are working. '
- "There are two versions of this endpoint to support the uppercase and lowercase 'w'"
-)
+SUMMARY_TEXT = 'Get the list of organizations present and in which humanitarian sectors they are working.'
@router.get(
- '/api/themes/3w',
+ '/api/coordination-context/operational-presence',
response_model=HapiGenericResponse[OperationalPresenceResponse],
summary=SUMMARY_TEXT,
include_in_schema=False,
)
@router.get(
- '/api/themes/3W',
- response_model=HapiGenericResponse[OperationalPresenceResponse],
- summary=SUMMARY_TEXT,
- include_in_schema=False,
-)
-@router.get(
- '/api/v1/themes/3w',
- response_model=HapiGenericResponse[OperationalPresenceResponse],
- summary=SUMMARY_TEXT,
-)
-@router.get(
- '/api/v1/themes/3W',
+ '/api/v1/coordination-context/operational-presence',
response_model=HapiGenericResponse[OperationalPresenceResponse],
summary=SUMMARY_TEXT,
- include_in_schema=False,
)
async def get_operational_presences(
+ ref_period_parameters: Annotated[ReferencePeriodParameters, Depends(reference_period_parameters)],
common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
db: AsyncSession = Depends(get_db),
sector_code: Annotated[
@@ -111,64 +102,65 @@ async def get_operational_presences(
),
),
] = None,
+ location_ref: Annotated[int, Query(description='Location reference')] = None,
location_code: Annotated[str, Query(max_length=128, description=f'{DOC_LOCATION_CODE} {DOC_SEE_LOC}')] = None,
location_name: Annotated[str, Query(max_length=512, description=f'{DOC_LOCATION_NAME} {DOC_SEE_LOC}')] = None,
+ admin1_ref: Annotated[int, Query(description='Admin1 reference')] = None,
admin1_code: Annotated[str, Query(max_length=128, description=f'{DOC_ADMIN1_CODE} {DOC_SEE_ADMIN1}')] = None,
admin1_name: Annotated[str, Query(max_length=512, description=f'{DOC_ADMIN1_NAME} {DOC_SEE_ADMIN1}')] = None,
- location_ref: Annotated[int, Query(description='Location reference')] = None,
# admin1_is_unspecified: Annotated[bool, Query(description='Location Adm1 is not specified')] = None,
+ admin2_ref: Annotated[int, Query(description='Admin2 reference')] = None,
admin2_code: Annotated[str, Query(max_length=128, description=f'{DOC_ADMIN2_CODE} {DOC_SEE_ADMIN2}')] = None,
admin2_name: Annotated[str, Query(max_length=512, description=f'{DOC_ADMIN2_NAME} {DOC_SEE_ADMIN2}')] = None,
- admin1_ref: Annotated[int, Query(description='Admin1 reference')] = None,
admin_level: Annotated[AdminLevel, Query(description='Filter the response by admin level')] = None,
# admin2_is_unspecified: Annotated[bool, Query(description='Location Adm2 is not specified')] = None,
- resource_update_date_min: Annotated[
- NaiveDatetime | date,
- Query(
- description=(
- 'Filter the response to data updated on or after this date. '
- 'For example 2020-01-01 or 2020-01-01T00:00:00'
- ),
- openapi_examples={'2020-01-01': {'value': '2020-01-01'}},
- ),
- ] = None,
- resource_update_date_max: Annotated[
- NaiveDatetime | date,
- Query(
- description=(
- 'Filter the response to data updated on or before this date. '
- 'For example 2024-12-31 or 2024-12-31T23:59:59'
- ),
- openapi_examples={'2024-12-31': {'value': '2024-12-31'}},
- ),
- ] = None,
- hapi_updated_date_min: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_UPDATED_DATE_MIN}'),
- ] = None,
- hapi_updated_date_max: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_UPDATED_DATE_MAX}'),
- ] = None,
- hapi_replaced_date_min: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_REPLACED_DATE_MIN}'),
- ] = None,
- hapi_replaced_date_max: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_REPLACED_DATE_MAX}'),
- ] = None,
- dataset_hdx_provider_stub: Annotated[
- str,
- Query(
- max_length=128,
- description=(
- 'Filter the query by the organizations contributing the source data to HDX. '
- 'If you want to filter by the organization mentioned in the operational presence record, '
- 'see the org_name and org_acronym parameters below.'
- ),
- ),
- ] = None,
+ # resource_update_date_min: Annotated[
+ # NaiveDatetime | date,
+ # Query(
+ # description=(
+ # 'Filter the response to data updated on or after this date. '
+ # 'For example 2020-01-01 or 2020-01-01T00:00:00'
+ # ),
+ # openapi_examples={'2020-01-01': {'value': '2020-01-01'}},
+ # ),
+ # ] = None,
+ # resource_update_date_max: Annotated[
+ # NaiveDatetime | date,
+ # Query(
+ # description=(
+ # 'Filter the response to data updated on or before this date. '
+ # 'For example 2024-12-31 or 2024-12-31T23:59:59'
+ # ),
+ # openapi_examples={'2024-12-31': {'value': '2024-12-31'}},
+ # ),
+ # ] = None,
+ # hapi_updated_date_min: Annotated[
+ # NaiveDatetime | date,
+ # Query(description=f'{DOC_HAPI_UPDATED_DATE_MIN}'),
+ # ] = None,
+ # hapi_updated_date_max: Annotated[
+ # NaiveDatetime | date,
+ # Query(description=f'{DOC_HAPI_UPDATED_DATE_MAX}'),
+ # ] = None,
+ # hapi_replaced_date_min: Annotated[
+ # NaiveDatetime | date,
+ # Query(description=f'{DOC_HAPI_REPLACED_DATE_MIN}'),
+ # ] = None,
+ # hapi_replaced_date_max: Annotated[
+ # NaiveDatetime | date,
+ # Query(description=f'{DOC_HAPI_REPLACED_DATE_MAX}'),
+ # ] = None,
+ # dataset_hdx_provider_stub: Annotated[
+ # str,
+ # Query(
+ # max_length=128,
+ # description=(
+ # 'Filter the query by the organizations contributing the source data to HDX. '
+ # 'If you want to filter by the organization mentioned in the operational presence record, '
+ # 'see the org_name and org_acronym parameters below.'
+ # ),
+ # ),
+ # ] = None,
# org_ref: Annotated[int, Query(ge=1, description='Organization reference')] = None,
# dataset_hdx_id: Annotated[str, Query(max_length=36, description='HDX Dataset ID')] = None,
# dataset_hdx_stub: Annotated[str, Query(max_length=128, description='HDX Dataset Name')] = None,
@@ -188,15 +180,16 @@ async def get_operational_presences(
"""
result = await get_operational_presences_srv(
pagination_parameters=common_parameters,
+ ref_period_parameters=ref_period_parameters,
db=db,
sector_code=sector_code,
- dataset_hdx_provider_stub=dataset_hdx_provider_stub,
- resource_update_date_min=resource_update_date_min,
- resource_update_date_max=resource_update_date_max,
- hapi_updated_date_min=hapi_updated_date_min,
- hapi_updated_date_max=hapi_updated_date_max,
- hapi_replaced_date_min=hapi_replaced_date_min,
- hapi_replaced_date_max=hapi_replaced_date_max,
+ # dataset_hdx_provider_stub=dataset_hdx_provider_stub,
+ # resource_update_date_min=resource_update_date_min,
+ # resource_update_date_max=resource_update_date_max,
+ # hapi_updated_date_min=hapi_updated_date_min,
+ # hapi_updated_date_max=hapi_updated_date_max,
+ # hapi_replaced_date_min=hapi_replaced_date_min,
+ # hapi_replaced_date_max=hapi_replaced_date_max,
org_acronym=org_acronym,
org_name=org_name,
sector_name=sector_name,
@@ -206,6 +199,7 @@ async def get_operational_presences(
admin1_name=admin1_name,
location_ref=location_ref,
# admin1_is_unspecified=admin1_is_unspecified,
+ admin2_ref=admin2_ref,
admin2_code=admin2_code,
admin2_name=admin2_name,
admin1_ref=admin1_ref,
diff --git a/hdx_hapi/endpoints/get_population.py b/hdx_hapi/endpoints/get_population.py
index a1a5ea35..8de19c5a 100644
--- a/hdx_hapi/endpoints/get_population.py
+++ b/hdx_hapi/endpoints/get_population.py
@@ -1,26 +1,26 @@
-from datetime import date
from typing import Annotated
from fastapi import Depends, Query, APIRouter
-from pydantic import NaiveDatetime
from sqlalchemy.ext.asyncio import AsyncSession
+from hapi_schema.utils.enums import Gender
from hdx_hapi.config.doc_snippets import (
DOC_LOCATION_CODE,
DOC_LOCATION_NAME,
DOC_SEE_LOC,
- DOC_UPDATE_DATE_MAX,
- DOC_UPDATE_DATE_MIN,
- DOC_HAPI_UPDATED_DATE_MIN,
- DOC_HAPI_UPDATED_DATE_MAX,
- DOC_HAPI_REPLACED_DATE_MIN,
- DOC_HAPI_REPLACED_DATE_MAX,
)
from hdx_hapi.endpoints.models.base import HapiGenericResponse
from hdx_hapi.endpoints.models.population import PopulationResponse
-from hdx_hapi.endpoints.util.util import AdminLevel, CommonEndpointParams, OutputFormat, common_endpoint_parameters
+from hdx_hapi.endpoints.util.util import (
+ CommonEndpointParams,
+ OutputFormat,
+ ReferencePeriodParameters,
+ common_endpoint_parameters,
+ reference_period_parameters,
+ AdminLevel,
+)
from hdx_hapi.services.csv_transform_logic import transform_result_to_csv_stream_if_requested
from hdx_hapi.services.population_logic import get_populations_srv
from hdx_hapi.services.sql_alchemy_session import get_db
@@ -31,56 +31,35 @@
@router.get(
- '/api/themes/population',
+ '/api/population-social/population',
response_model=HapiGenericResponse[PopulationResponse],
summary='Get baseline population data',
include_in_schema=False,
)
@router.get(
- '/api/v1/themes/population',
+ '/api/v1/population-social/population',
response_model=HapiGenericResponse[PopulationResponse],
summary='Get baseline population data',
)
async def get_populations(
common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
+ ref_period_parameters: Annotated[ReferencePeriodParameters, Depends(reference_period_parameters)],
db: AsyncSession = Depends(get_db),
- gender_code: Annotated[str, Query(max_length=1, description='Gender code')] = None,
- age_range_code: Annotated[str, Query(max_length=32, description='Age range code')] = None,
+ gender: Annotated[Gender, Query(description='Gender')] = None,
+ age_range: Annotated[str, Query(max_length=32, description='Age range')] = None,
+ min_age: Annotated[int, Query(description='Minimum age')] = None,
+ max_age: Annotated[int, Query(description='Maximum age')] = None,
population: Annotated[int, Query(description='Population')] = None,
- dataset_hdx_provider_stub: Annotated[str, Query(max_length=128, description='Organization(provider) code')] = None,
- resource_update_date_min: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_UPDATE_DATE_MIN}', openapi_examples={'2020-01-01': {'value': '2020-01-01'}}),
- ] = None,
- resource_update_date_max: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_UPDATE_DATE_MAX}', openapi_examples={'2024-12-31': {'value': '2024-12-31'}}),
- ] = None,
- hapi_updated_date_min: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_UPDATED_DATE_MIN}'),
- ] = None,
- hapi_updated_date_max: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_UPDATED_DATE_MAX}'),
- ] = None,
- hapi_replaced_date_min: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_REPLACED_DATE_MIN}'),
- ] = None,
- hapi_replaced_date_max: Annotated[
- NaiveDatetime | date,
- Query(description=f'{DOC_HAPI_REPLACED_DATE_MAX}'),
- ] = None,
+ admin1_ref: Annotated[int, Query(description='Admin1 reference')] = None,
+ location_ref: Annotated[int, Query(description='Location reference')] = None,
location_code: Annotated[str, Query(max_length=128, description=f'{DOC_LOCATION_CODE} {DOC_SEE_LOC}')] = None,
location_name: Annotated[str, Query(max_length=512, description=f'{DOC_LOCATION_NAME} {DOC_SEE_LOC}')] = None,
admin1_name: Annotated[str, Query(max_length=512, description='Admin1 name')] = None,
admin1_code: Annotated[str, Query(max_length=128, description='Admin1 code')] = None,
# admin1_is_unspecified: Annotated[bool, Query(description='Is admin1 specified or not')] = None,
- location_ref: Annotated[int, Query(description='Location reference')] = None,
+ admin2_ref: Annotated[int, Query(description='Admin2 reference')] = None,
admin2_name: Annotated[str, Query(max_length=512, description='Admin2 name')] = None,
admin2_code: Annotated[str, Query(max_length=128, description='Admin2 code')] = None,
- admin1_ref: Annotated[int, Query(description='Admin1 reference')] = None,
admin_level: Annotated[AdminLevel, Query(description='Filter the response by admin level')] = None,
# admin2_is_unspecified: Annotated[bool, Query(description='Is admin2 specified or not')] = None,
output_format: OutputFormat = OutputFormat.JSON,
@@ -90,27 +69,22 @@ async def get_populations(
"""
result = await get_populations_srv(
pagination_parameters=common_parameters,
+ ref_period_parameters=ref_period_parameters,
db=db,
- gender_code=gender_code,
- age_range_code=age_range_code,
+ gender=gender,
+ age_range=age_range,
+ min_age=min_age,
+ max_age=max_age,
population=population,
- dataset_hdx_provider_stub=dataset_hdx_provider_stub,
- resource_update_date_min=resource_update_date_min,
- resource_update_date_max=resource_update_date_max,
- hapi_updated_date_min=hapi_updated_date_min,
- hapi_updated_date_max=hapi_updated_date_max,
- hapi_replaced_date_min=hapi_replaced_date_min,
- hapi_replaced_date_max=hapi_replaced_date_max,
+ admin1_ref=admin1_ref,
+ location_ref=location_ref,
location_code=location_code,
location_name=location_name,
admin1_name=admin1_name,
admin1_code=admin1_code,
- location_ref=location_ref,
- # admin1_is_unspecified=admin1_is_unspecified,
- admin2_code=admin2_code,
+ admin2_ref=admin2_ref,
admin2_name=admin2_name,
- admin1_ref=admin1_ref,
+ admin2_code=admin2_code,
admin_level=admin_level,
- # admin2_is_unspecified=admin2_is_unspecified,
)
return transform_result_to_csv_stream_if_requested(result, output_format, PopulationResponse)
diff --git a/hdx_hapi/endpoints/get_population_profile.py b/hdx_hapi/endpoints/get_population_profile.py
deleted file mode 100644
index 91dc31f5..00000000
--- a/hdx_hapi/endpoints/get_population_profile.py
+++ /dev/null
@@ -1,77 +0,0 @@
-from typing import Annotated
-from fastapi import Depends, Query, APIRouter
-
-
-from sqlalchemy.ext.asyncio import AsyncSession
-
-from hdx_hapi.endpoints.models.base import HapiGenericResponse
-from hdx_hapi.endpoints.models.population_profile import PopulationGroupResponse, PopulationStatusResponse
-from hdx_hapi.endpoints.util.util import CommonEndpointParams, OutputFormat, common_endpoint_parameters
-from hdx_hapi.services.csv_transform_logic import transform_result_to_csv_stream_if_requested
-from hdx_hapi.services.population_group_logic import get_population_groups_srv
-from hdx_hapi.services.population_status_logic import get_population_statuses_srv
-from hdx_hapi.services.sql_alchemy_session import get_db
-
-router = APIRouter(
- tags=['Population Groups and Statuses'],
-)
-
-
-@router.get(
- '/api/population_group',
- response_model=HapiGenericResponse[PopulationGroupResponse],
- summary='Get population groups data',
- include_in_schema=False,
-)
-@router.get(
- '/api/v1/population_group',
- response_model=HapiGenericResponse[PopulationGroupResponse],
- summary='Get population groups data',
-)
-async def get_population_groups(
- common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
- db: AsyncSession = Depends(get_db),
- code: Annotated[str, Query(max_length=32, description='Population group code')] = None,
- description: Annotated[str, Query(max_length=512, description='Population group description')] = None,
- output_format: OutputFormat = OutputFormat.JSON,
-):
- """
- Return the list of population groups
- """
- result = await get_population_groups_srv(
- pagination_parameters=common_parameters,
- db=db,
- code=code,
- description=description,
- )
- return transform_result_to_csv_stream_if_requested(result, output_format, PopulationGroupResponse)
-
-
-@router.get(
- '/api/population_status',
- response_model=HapiGenericResponse[PopulationStatusResponse],
- summary='Get population statuses data',
- include_in_schema=False,
-)
-@router.get(
- '/api/v1/population_status',
- response_model=HapiGenericResponse[PopulationStatusResponse],
- summary='Get population statuses data',
-)
-async def get_population_statuses(
- common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
- db: AsyncSession = Depends(get_db),
- code: Annotated[str, Query(max_length=32, description='Population status code')] = None,
- description: Annotated[str, Query(max_length=512, description='Population status description')] = None,
- output_format: OutputFormat = OutputFormat.JSON,
-):
- """
- Return the list of population statuses
- """
- result = await get_population_statuses_srv(
- pagination_parameters=common_parameters,
- db=db,
- code=code,
- description=description,
- )
- return transform_result_to_csv_stream_if_requested(result, output_format, PopulationStatusResponse)
diff --git a/hdx_hapi/endpoints/middleware/app_identifier_middleware.py b/hdx_hapi/endpoints/middleware/app_identifier_middleware.py
index d1dedade..c4fb05fb 100644
--- a/hdx_hapi/endpoints/middleware/app_identifier_middleware.py
+++ b/hdx_hapi/endpoints/middleware/app_identifier_middleware.py
@@ -15,8 +15,8 @@
ALLOWED_API_ENDPOINTS = {
- '/api/v1/encode_identifier',
- '/api/encode_identifier',
+ '/api/v1/encode_app_identifier',
+ '/api/encode_app_identifier',
}
diff --git a/hdx_hapi/endpoints/middleware/mixpanel_tracking_middleware.py b/hdx_hapi/endpoints/middleware/mixpanel_tracking_middleware.py
index 438e2e2c..91c372fb 100644
--- a/hdx_hapi/endpoints/middleware/mixpanel_tracking_middleware.py
+++ b/hdx_hapi/endpoints/middleware/mixpanel_tracking_middleware.py
@@ -20,17 +20,14 @@ async def mixpanel_tracking_middleware(request: Request, call_next):
response = await call_next(request)
- if request.url.path.startswith('/api'):
- if CONFIG.HDX_MIXPANEL_TOKEN:
+
+ if CONFIG.MIXPANEL:
+ if request.url.path.startswith('/api'):
background_tasks.add_task(track_api_call, request, response)
- else:
- logger.warning('HDX_MIXPANEL_TOKEN environment variable is not set.')
-
- if request.url.path.startswith('/docs'):
- if CONFIG.HDX_MIXPANEL_TOKEN:
+ elif request.url.path.startswith('/docs'):
background_tasks.add_task(track_page_view, request, response)
- else:
- logger.warning('HDX_MIXPANEL_TOKEN environment variable is not set.')
+ else:
+ logger.warning('HDX_MIXPANEL_TOKEN environment variable is not set.')
response.background = background_tasks
return response
diff --git a/hdx_hapi/endpoints/middleware/util/__init__.py b/hdx_hapi/endpoints/middleware/util/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/hdx_hapi/endpoints/middleware/util/util.py b/hdx_hapi/endpoints/middleware/util/util.py
index 55e7bf04..05db8640 100644
--- a/hdx_hapi/endpoints/middleware/util/util.py
+++ b/hdx_hapi/endpoints/middleware/util/util.py
@@ -4,11 +4,13 @@
import ua_parser.user_agent_parser as useragent
from fastapi import Request, Response
-from hdx_hapi.config.config import mixpanel
+from hdx_hapi.config.config import get_config
logger = logging.getLogger(__name__)
+_CONFIG = get_config()
+
async def track_api_call(request: Request, response: Response):
current_url = str(request.url)
@@ -78,7 +80,7 @@ async def track_page_view(request: Request, response: Response):
async def send_mixpanel_event(event_name: str, distinct_id: str, event_data: dict):
- mixpanel.track(distinct_id, event_name, event_data)
+ _CONFIG.MIXPANEL.track(distinct_id, event_name, event_data)
class HashCodeGenerator(object):
diff --git a/hdx_hapi/endpoints/models/admin_level.py b/hdx_hapi/endpoints/models/admin_level.py
index 33afc271..db355a91 100644
--- a/hdx_hapi/endpoints/models/admin_level.py
+++ b/hdx_hapi/endpoints/models/admin_level.py
@@ -9,6 +9,9 @@ class LocationResponse(HapiBaseModel):
code: str = Field(max_length=128)
name: str = Field(max_length=512)
+ reference_period_start: datetime
+ reference_period_end: Optional[datetime]
+
model_config = ConfigDict(from_attributes=True)
@@ -17,8 +20,10 @@ class Admin1Response(HapiBaseModel):
# location_ref: int
code: str = Field(max_length=128)
name: str = Field(max_length=512)
- hapi_updated_date: datetime
- hapi_replaced_date: Optional[datetime]
+ # hapi_updated_date: datetime
+ # hapi_replaced_date: Optional[datetime]
+ reference_period_start: datetime
+ reference_period_end: Optional[datetime]
location_code: str = Field(max_length=128)
location_name: str = Field(max_length=512)
@@ -30,8 +35,10 @@ class Admin2Response(HapiBaseModel):
# admin1_ref: int
code: str = Field(max_length=128)
name: str = Field(max_length=512)
- hapi_updated_date: datetime
- hapi_replaced_date: Optional[datetime]
+ # hapi_updated_date: datetime
+ # hapi_replaced_date: Optional[datetime]
+ reference_period_start: datetime
+ reference_period_end: Optional[datetime]
admin1_code: str = Field(max_length=128)
admin1_name: str = Field(max_length=512)
diff --git a/hdx_hapi/endpoints/models/base.py b/hdx_hapi/endpoints/models/base.py
index d9689ffd..677cc5e4 100644
--- a/hdx_hapi/endpoints/models/base.py
+++ b/hdx_hapi/endpoints/models/base.py
@@ -1,5 +1,6 @@
-from typing import Generic, List, TypeVar
-from pydantic import BaseModel, ConfigDict
+from typing import Generic, List, Optional, TypeVar
+from typing_extensions import Self
+from pydantic import BaseModel, ConfigDict, Field, model_validator
class HapiBaseModel(BaseModel):
@@ -7,6 +8,39 @@ def list_of_fields(self) -> List[str]:
return list(self.model_fields.keys())
+class HapiModelWithAdmins(BaseModel):
+ location_ref: int
+ location_code: str = Field(max_length=128)
+ location_name: str = Field(max_length=512)
+
+ admin1_is_unspecified: bool = Field(exclude=True)
+ admin2_is_unspecified: bool = Field(exclude=True)
+
+ admin1_ref: int
+ admin1_code: Optional[str] = Field(max_length=128)
+ admin1_name: Optional[str] = Field(max_length=512)
+ admin2_ref: int
+ admin2_code: Optional[str] = Field(max_length=128)
+ admin2_name: Optional[str] = Field(max_length=512)
+
+ @model_validator(mode='after') # type: ignore
+ def set_admin1_admin2_null(self) -> Self:
+ admin1_is_unspecified = self.admin1_is_unspecified
+ admin2_is_unspecified = self.admin2_is_unspecified
+
+ # If 'admin1_is_unspecified' is True, set 'admin1_code' and 'admin1_name' to None
+ if admin1_is_unspecified:
+ self.admin1_code = None
+ self.admin1_name = None
+
+ # If 'admin2_is_unspecified' is True, set 'admin2_code' and 'admin2_name' to None
+ if admin2_is_unspecified:
+ self.admin2_code = None
+ self.admin2_name = None
+
+ return self
+
+
DataT = TypeVar('DataT')
diff --git a/hdx_hapi/endpoints/models/conflict_event.py b/hdx_hapi/endpoints/models/conflict_event.py
new file mode 100644
index 00000000..72885f04
--- /dev/null
+++ b/hdx_hapi/endpoints/models/conflict_event.py
@@ -0,0 +1,17 @@
+from hapi_schema.utils.enums import EventType
+from pydantic import ConfigDict, Field, NaiveDatetime
+from typing import Optional
+
+from hdx_hapi.endpoints.models.base import HapiBaseModel, HapiModelWithAdmins
+
+
+class ConflictEventResponse(HapiBaseModel, HapiModelWithAdmins):
+ resource_hdx_id: str = Field(max_length=36)
+ event_type: EventType
+ events: Optional[int]
+ fatalities: Optional[int]
+
+ reference_period_start: NaiveDatetime
+ reference_period_end: Optional[NaiveDatetime]
+
+ model_config = ConfigDict(from_attributes=True)
diff --git a/hdx_hapi/endpoints/models/demographic.py b/hdx_hapi/endpoints/models/demographic.py
deleted file mode 100644
index 4811dca0..00000000
--- a/hdx_hapi/endpoints/models/demographic.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from pydantic import ConfigDict, Field
-from typing import Optional
-
-from hdx_hapi.endpoints.models.base import HapiBaseModel
-
-
-class AgeRangeResponse(HapiBaseModel):
- code: str = Field(max_length=32)
- age_min: int = None
- age_max: Optional[int] = None
-
- model_config = ConfigDict(from_attributes=True)
-
-
-class GenderResponse(HapiBaseModel):
- code: str = Field(max_length=1)
- description: str = Field(max_length=256)
-
- model_config = ConfigDict(from_attributes=True)
diff --git a/hdx_hapi/endpoints/models/encoded_identifier.py b/hdx_hapi/endpoints/models/encoded_identifier.py
index cbb7555f..db589f25 100644
--- a/hdx_hapi/endpoints/models/encoded_identifier.py
+++ b/hdx_hapi/endpoints/models/encoded_identifier.py
@@ -3,4 +3,4 @@
class IdentifierResponse(HapiBaseModel):
- encoded_identifier: str = Field(max_length=512)
+ encoded_app_identifier: str = Field(max_length=512)
diff --git a/hdx_hapi/endpoints/models/food_security.py b/hdx_hapi/endpoints/models/food_security.py
index 3c5aaccc..ec784511 100644
--- a/hdx_hapi/endpoints/models/food_security.py
+++ b/hdx_hapi/endpoints/models/food_security.py
@@ -1,11 +1,11 @@
from datetime import datetime
-from pydantic import ConfigDict, Field, model_validator, NaiveDatetime
+from pydantic import ConfigDict, Field, NaiveDatetime
from typing import Optional
-from hdx_hapi.endpoints.models.base import HapiBaseModel
+from hdx_hapi.endpoints.models.base import HapiBaseModel, HapiModelWithAdmins
-class FoodSecurityResponse(HapiBaseModel):
+class FoodSecurityResponse(HapiBaseModel, HapiModelWithAdmins):
population_in_phase: int
population_fraction_in_phase: float
@@ -22,34 +22,4 @@ class FoodSecurityResponse(HapiBaseModel):
hapi_updated_date: datetime
hapi_replaced_date: Optional[datetime]
- location_code: str = Field(max_length=128)
- location_name: str = Field(max_length=512)
-
- admin1_is_unspecified: bool = Field(exclude=True)
- admin2_is_unspecified: bool = Field(exclude=True)
-
- admin1_code: Optional[str] = Field(max_length=128)
- admin1_name: Optional[str] = Field(max_length=512)
- location_ref: int = None
- admin2_code: Optional[str] = Field(max_length=128)
- admin2_name: Optional[str] = Field(max_length=512)
- admin1_ref: int = None
-
model_config = ConfigDict(from_attributes=True)
-
- @model_validator(mode='after')
- def set_admin1_admin2_null(self) -> 'FoodSecurityResponse':
- admin1_is_unspecified = self.admin1_is_unspecified
- admin2_is_unspecified = self.admin2_is_unspecified
-
- # If 'admin1_is_unspecified' is True, set 'admin1_code' and 'admin1_name' to None
- if admin1_is_unspecified:
- self.admin1_code = None
- self.admin1_name = None
-
- # If 'admin2_is_unspecified' is True, set 'admin2_code' and 'admin2_name' to None
- if admin2_is_unspecified:
- self.admin2_code = None
- self.admin2_name = None
-
- return self
diff --git a/hdx_hapi/endpoints/models/funding.py b/hdx_hapi/endpoints/models/funding.py
new file mode 100644
index 00000000..0a611cb0
--- /dev/null
+++ b/hdx_hapi/endpoints/models/funding.py
@@ -0,0 +1,25 @@
+from pydantic import ConfigDict, Field, NaiveDatetime
+from typing import Optional
+
+from hdx_hapi.endpoints.models.base import HapiBaseModel
+
+
+class FundingResponse(HapiBaseModel):
+ resource_hdx_id: str = Field(max_length=36)
+
+ appeal_code: str = Field(max_length=32)
+ appeal_name: str = Field(max_length=256)
+ appeal_type: str = Field(max_length=32)
+
+ requirements_usd: float = Field(ge=0.0)
+ funding_usd: float = Field(ge=0.0)
+ funding_pct: float = Field(ge=0.0)
+
+ location_ref: int
+ location_code: str = Field(max_length=128)
+ location_name: str = Field(max_length=512)
+
+ reference_period_start: NaiveDatetime
+ reference_period_end: Optional[NaiveDatetime]
+
+ model_config = ConfigDict(from_attributes=True)
diff --git a/hdx_hapi/endpoints/models/hdx_metadata.py b/hdx_hapi/endpoints/models/hdx_metadata.py
index 93b25596..338998f3 100644
--- a/hdx_hapi/endpoints/models/hdx_metadata.py
+++ b/hdx_hapi/endpoints/models/hdx_metadata.py
@@ -1,4 +1,4 @@
-from typing import List, Optional
+from typing import List
from datetime import datetime
from pydantic import ConfigDict, Field, HttpUrl, computed_field
from hdx_hapi.endpoints.models.base import HapiBaseModel
@@ -7,6 +7,8 @@
get_resource_api_url,
get_dataset_url,
get_dataset_api_url,
+ get_organization_url,
+ get_organization_api_url,
)
@@ -29,26 +31,37 @@ def hdx_link(self) -> HttpUrl:
def hdx_api_link(self) -> HttpUrl:
return get_dataset_api_url(dataset_id=self.hdx_id)
+ @computed_field
+ @property
+ def provider_hdx_link(self) -> HttpUrl:
+ return get_organization_url(org_id=self.hdx_provider_stub)
+
+ model_config = ConfigDict(from_attributes=True)
+
+ @computed_field
+ @property
+ def provider_hdx_api_link(self) -> HttpUrl:
+ return get_organization_api_url(org_id=self.hdx_provider_stub)
+
model_config = ConfigDict(from_attributes=True)
def list_of_fields(self) -> List[str]:
fields = super().list_of_fields()
- fields.extend(['hdx_link', 'api_link'])
+ fields.extend(['hdx_link', 'api_link', 'provider_hdx_link', 'provider_hdx_api_link'])
return fields
class ResourceResponse(HapiBaseModel):
# id: int
hdx_id: str = Field(max_length=36)
+ dataset_hdx_id: str = Field(max_length=36)
name: str = Field(max_length=256)
format: str = Field(max_length=32)
update_date: datetime
is_hxl: bool
- hapi_updated_date: datetime
- hapi_replaced_date: Optional[datetime]
download_url: HttpUrl
+ hapi_updated_date: datetime
- dataset_hdx_id: str = Field(max_length=36)
dataset_hdx_stub: str = Field(max_length=128)
dataset_title: str = Field(max_length=1024)
@@ -77,9 +90,19 @@ def dataset_hdx_link(self) -> HttpUrl:
def dataset_hdx_api_link(self) -> HttpUrl:
return get_dataset_api_url(dataset_id=self.dataset_hdx_id)
+ @computed_field
+ @property
+ def provider_hdx_link(self) -> HttpUrl:
+ return get_organization_url(org_id=self.dataset_hdx_provider_stub)
+
+ @computed_field
+ @property
+ def provider_hdx_api_link(self) -> HttpUrl:
+ return get_organization_api_url(org_id=self.dataset_hdx_provider_stub)
+
model_config = ConfigDict(from_attributes=True)
def list_of_fields(self) -> List[str]:
fields = super().list_of_fields()
- fields.extend(['hdx_link', 'api_link', 'dataset_hdx_link', 'dataset_api_link'])
+ fields.extend(['hdx_link', 'api_link', 'dataset_hdx_link', 'dataset_hdx_api_link'])
return fields
diff --git a/hdx_hapi/endpoints/models/humanitarian_needs.py b/hdx_hapi/endpoints/models/humanitarian_needs.py
index 07d20f91..fa6c1a52 100644
--- a/hdx_hapi/endpoints/models/humanitarian_needs.py
+++ b/hdx_hapi/endpoints/models/humanitarian_needs.py
@@ -1,58 +1,23 @@
-from datetime import datetime
-from pydantic import ConfigDict, Field, model_validator, NaiveDatetime
+from pydantic import ConfigDict, Field, NaiveDatetime
from typing import Optional
-from hdx_hapi.endpoints.models.base import HapiBaseModel
+from hdx_hapi.endpoints.models.base import HapiBaseModel, HapiModelWithAdmins
+from hapi_schema.utils.enums import Gender, PopulationGroup, PopulationStatus, DisabledMarker
-class HumanitarianNeedsResponse(HapiBaseModel):
- gender_code: Optional[str] = Field(max_length=1)
- age_range_code: Optional[str] = Field(max_length=32)
- disabled_marker: Optional[bool] = None
- sector_code: Optional[str] = Field(max_length=32)
- sector_name: Optional[str] = Field(max_length=512)
- population_status_code: Optional[str] = Field(max_length=32)
- population_group_code: Optional[str] = Field(max_length=32)
- population: int = None
-
- reference_period_start: Optional[NaiveDatetime]
- reference_period_end: Optional[NaiveDatetime]
-
- dataset_hdx_stub: str = Field(max_length=128)
- dataset_hdx_provider_stub: str = Field(max_length=128)
+class HumanitarianNeedsResponse(HapiBaseModel, HapiModelWithAdmins):
resource_hdx_id: str = Field(max_length=36)
-
- hapi_updated_date: datetime
- hapi_replaced_date: Optional[datetime]
-
- location_code: str = Field(max_length=128)
- location_name: str = Field(max_length=512)
-
- admin1_is_unspecified: bool = Field(exclude=True)
- admin2_is_unspecified: bool = Field(exclude=True)
-
- admin1_code: Optional[str] = Field(max_length=128)
- admin1_name: Optional[str] = Field(max_length=512)
- location_ref: int = None
- admin2_code: Optional[str] = Field(max_length=128)
- admin2_name: Optional[str] = Field(max_length=512)
- admin1_ref: int = None
+ gender: Gender
+ age_range: str = Field(max_length=32)
+ min_age: Optional[int] = Field(ge=0)
+ max_age: Optional[int] = Field(ge=0)
+ disabled_marker: DisabledMarker
+ sector_code: str = Field(max_length=32)
+ population_group: PopulationGroup
+ population_status: PopulationStatus
+ population: int = Field(ge=0)
+ reference_period_start: NaiveDatetime
+ reference_period_end: Optional[NaiveDatetime]
+ sector_name: Optional[str] = Field(max_length=512)
model_config = ConfigDict(from_attributes=True)
-
- @model_validator(mode='after')
- def set_admin1_admin2_null(self) -> 'HumanitarianNeedsResponse':
- admin1_is_unspecified = self.admin1_is_unspecified
- admin2_is_unspecified = self.admin2_is_unspecified
-
- # If 'admin1_is_unspecified' is True, set 'admin1_code' and 'admin1_name' to None
- if admin1_is_unspecified:
- self.admin1_code = None
- self.admin1_name = None
-
- # If 'admin2_is_unspecified' is True, set 'admin2_code' and 'admin2_name' to None
- if admin2_is_unspecified:
- self.admin2_code = None
- self.admin2_name = None
-
- return self
diff --git a/hdx_hapi/endpoints/models/national_risk.py b/hdx_hapi/endpoints/models/national_risk.py
index a73d984c..258c8a9d 100644
--- a/hdx_hapi/endpoints/models/national_risk.py
+++ b/hdx_hapi/endpoints/models/national_risk.py
@@ -1,29 +1,32 @@
-from datetime import datetime
+from hapi_schema.utils.enums import RiskClass
from pydantic import ConfigDict, Field, NaiveDatetime
from typing import Optional
from hdx_hapi.endpoints.models.base import HapiBaseModel
+from hdx_hapi.endpoints.models.util.constants import PERCENTAGE_TYPE
+
+RISK_TYPE = Field(ge=0, le=10)
class NationalRiskResponse(HapiBaseModel):
- risk_class: int
- global_rank: int
- overall_risk: float
- hazard_exposure_risk: float
- vulnerability_risk: float
- coping_capacity_risk: float
+ risk_class: RiskClass
+ global_rank: int = Field(ge=1, le=250)
+ overall_risk: float = RISK_TYPE
+ hazard_exposure_risk: float = RISK_TYPE
+ vulnerability_risk: float = RISK_TYPE
+ coping_capacity_risk: float = RISK_TYPE
- meta_missing_indicators_pct: Optional[float] = None
- meta_avg_recentness_years: Optional[float] = None
+ meta_missing_indicators_pct: Optional[float] = PERCENTAGE_TYPE
+ meta_avg_recentness_years: Optional[float] = Field(ge=0)
reference_period_start: Optional[NaiveDatetime]
reference_period_end: Optional[NaiveDatetime]
- dataset_hdx_stub: str = Field(max_length=128)
- dataset_hdx_provider_stub: str = Field(max_length=128)
+ # dataset_hdx_stub: str = Field(max_length=128)
+ # dataset_hdx_provider_stub: str = Field(max_length=128)
resource_hdx_id: str = Field(max_length=36)
- hapi_updated_date: datetime
- hapi_replaced_date: Optional[datetime]
+ # hapi_updated_date: datetime
+ # hapi_replaced_date: Optional[datetime]
# sector_name: str = Field(max_length=512)
diff --git a/hdx_hapi/endpoints/models/operational_presence.py b/hdx_hapi/endpoints/models/operational_presence.py
index 617ff65d..191dbee2 100644
--- a/hdx_hapi/endpoints/models/operational_presence.py
+++ b/hdx_hapi/endpoints/models/operational_presence.py
@@ -1,36 +1,23 @@
-from datetime import datetime
-from pydantic import ConfigDict, Field, model_validator, NaiveDatetime
+from pydantic import ConfigDict, Field, NaiveDatetime
from typing import Optional
-from hdx_hapi.endpoints.models.base import HapiBaseModel
+from hdx_hapi.endpoints.models.base import HapiBaseModel, HapiModelWithAdmins
-class OperationalPresenceResponse(HapiBaseModel):
-
- sector_code: str = Field(max_length=32)
- dataset_hdx_stub: str = Field(max_length=128)
+class OperationalPresenceResponse(HapiBaseModel, HapiModelWithAdmins):
+ # dataset_hdx_stub: str = Field(max_length=128)
resource_hdx_id: str = Field(max_length=36)
org_acronym: str = Field(max_length=32)
org_name: str = Field(max_length=512)
+ sector_code: str = Field(max_length=32)
sector_name: str = Field(max_length=512)
- location_code: str = Field(max_length=128)
- location_name: str = Field(max_length=512)
- reference_period_start: Optional[NaiveDatetime]
+ reference_period_start: NaiveDatetime
reference_period_end: Optional[NaiveDatetime]
- hapi_updated_date: datetime
- hapi_replaced_date: Optional[datetime]
-
- admin1_is_unspecified: bool = Field(exclude=True)
- admin2_is_unspecified: bool = Field(exclude=True)
+ # hapi_updated_date: datetime
+ # hapi_replaced_date: Optional[datetime]
- admin1_code: Optional[str] = Field(max_length=128)
- admin1_name: Optional[str] = Field(max_length=512)
- location_ref: int = None
- admin2_code: Optional[str] = Field(max_length=128)
- admin2_name: Optional[str] = Field(max_length=512)
- admin1_ref: int = None
# resource_update_date: datetime
# org_ref: int = None,
# dataset_hdx_id: str = Field(max_length=36),
@@ -38,24 +25,7 @@ class OperationalPresenceResponse(HapiBaseModel):
# dataset_hdx_provider_stub: str = Field(max_length=128),
# dataset_hdx_provider_name: str = Field(max_length=512),
# resource_name: str = Field(max_length=256),
- # org_type_code: str = Field(max_length=32),
+ org_type_code: str = Field(max_length=32)
# org_type_description: str = Field(max_length=512),
model_config = ConfigDict(from_attributes=True)
-
- @model_validator(mode='after')
- def set_admin1_admin2_null(self) -> 'OperationalPresenceResponse':
- admin1_is_unspecified = self.admin1_is_unspecified
- admin2_is_unspecified = self.admin2_is_unspecified
-
- # If 'admin1_is_unspecified' is True, set 'admin1_code' and 'admin1_name' to None
- if admin1_is_unspecified:
- self.admin1_code = None
- self.admin1_name = None
-
- # If 'admin2_is_unspecified' is True, set 'admin2_code' and 'admin2_name' to None
- if admin2_is_unspecified:
- self.admin2_code = None
- self.admin2_name = None
-
- return self
diff --git a/hdx_hapi/endpoints/models/population.py b/hdx_hapi/endpoints/models/population.py
index 54ff257c..00c8af5a 100644
--- a/hdx_hapi/endpoints/models/population.py
+++ b/hdx_hapi/endpoints/models/population.py
@@ -1,50 +1,22 @@
-from datetime import datetime
-from pydantic import ConfigDict, Field, model_validator, NaiveDatetime
+from pydantic import ConfigDict, Field, NaiveDatetime
from typing import Optional
-from hdx_hapi.endpoints.models.base import HapiBaseModel
+from hapi_schema.utils.enums import Gender
+from hdx_hapi.endpoints.models.base import HapiBaseModel, HapiModelWithAdmins
-class PopulationResponse(HapiBaseModel):
- gender_code: Optional[str] = Field(max_length=1)
- age_range_code: Optional[str] = Field(max_length=32)
+class PopulationResponse(HapiBaseModel, HapiModelWithAdmins):
+ resource_hdx_id: str = Field(max_length=36)
+ admin2_ref: int = None
+
+ gender: Optional[Gender] = Field()
+ age_range: Optional[str] = Field(max_length=32)
+
+ min_age: Optional[int]
+ max_age: Optional[int]
population: int
reference_period_start: Optional[NaiveDatetime]
reference_period_end: Optional[NaiveDatetime]
- dataset_hdx_stub: str = Field(max_length=128)
- resource_hdx_id: str = Field(max_length=36)
- hapi_updated_date: datetime
- hapi_replaced_date: Optional[datetime]
- location_code: str = Field(max_length=128)
- location_name: str = Field(max_length=512)
-
- admin1_is_unspecified: bool = Field(exclude=True)
- admin2_is_unspecified: bool = Field(exclude=True)
-
- admin1_code: Optional[str] = Field(max_length=128)
- admin1_name: Optional[str] = Field(max_length=512)
- location_ref: int = None
- admin2_code: Optional[str] = Field(max_length=128)
- admin2_name: Optional[str] = Field(max_length=512)
- admin1_ref: int = None
-
model_config = ConfigDict(from_attributes=True)
-
- @model_validator(mode='after')
- def set_admin1_admin2_null(self) -> 'PopulationResponse':
- admin1_is_unspecified = self.admin1_is_unspecified
- admin2_is_unspecified = self.admin2_is_unspecified
-
- # If 'admin1_is_unspecified' is True, set 'admin1_code' and 'admin1_name' to None
- if admin1_is_unspecified:
- self.admin1_code = None
- self.admin1_name = None
-
- # If 'admin2_is_unspecified' is True, set 'admin2_code' and 'admin2_name' to None
- if admin2_is_unspecified:
- self.admin2_code = None
- self.admin2_name = None
-
- return self
diff --git a/hdx_hapi/endpoints/models/population_profile.py b/hdx_hapi/endpoints/models/population_profile.py
deleted file mode 100644
index 0e83b104..00000000
--- a/hdx_hapi/endpoints/models/population_profile.py
+++ /dev/null
@@ -1,17 +0,0 @@
-from pydantic import ConfigDict, Field
-
-from hdx_hapi.endpoints.models.base import HapiBaseModel
-
-
-class PopulationGroupResponse(HapiBaseModel):
- code: str = Field(max_length=32)
- description: str = Field(max_length=512)
-
- model_config = ConfigDict(from_attributes=True)
-
-
-class PopulationStatusResponse(HapiBaseModel):
- code: str = Field(max_length=32)
- description: str = Field(max_length=512)
-
- model_config = ConfigDict(from_attributes=True)
diff --git a/hdx_hapi/endpoints/models/refugees.py b/hdx_hapi/endpoints/models/refugees.py
new file mode 100644
index 00000000..bf72b321
--- /dev/null
+++ b/hdx_hapi/endpoints/models/refugees.py
@@ -0,0 +1,25 @@
+from pydantic import ConfigDict, Field, NaiveDatetime
+from typing import Optional
+
+from hdx_hapi.endpoints.models.base import HapiBaseModel
+from hapi_schema.utils.enums import Gender, PopulationGroup
+
+
+class RefugeesResponse(HapiBaseModel):
+ resource_hdx_id: str = Field(max_length=36)
+ origin_location_ref: int
+ asylum_location_ref: int
+ population_group: PopulationGroup
+ gender: Gender
+ age_range: str = Field(max_length=32)
+ min_age: Optional[int] = Field(ge=0)
+ max_age: Optional[int] = Field(ge=0)
+ population: int = Field(ge=0)
+ reference_period_start: NaiveDatetime
+ reference_period_end: Optional[NaiveDatetime]
+ origin_location_code: Optional[str] = Field(max_length=128)
+ origin_location_name: Optional[str] = Field(max_length=512)
+ asylum_location_code: Optional[str] = Field(max_length=128)
+ asylum_location_name: Optional[str] = Field(max_length=512)
+
+ model_config = ConfigDict(from_attributes=True)
diff --git a/hdx_hapi/endpoints/models/util/__init__.py b/hdx_hapi/endpoints/models/util/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/hdx_hapi/endpoints/models/util/constants.py b/hdx_hapi/endpoints/models/util/constants.py
new file mode 100644
index 00000000..4e68a9c3
--- /dev/null
+++ b/hdx_hapi/endpoints/models/util/constants.py
@@ -0,0 +1,4 @@
+from pydantic import Field
+
+
+PERCENTAGE_TYPE = Field(ge=0, le=100)
diff --git a/hdx_hapi/endpoints/util/util.py b/hdx_hapi/endpoints/util/util.py
index c239321e..a1a0ca37 100644
--- a/hdx_hapi/endpoints/util/util.py
+++ b/hdx_hapi/endpoints/util/util.py
@@ -1,8 +1,9 @@
+from datetime import date
from enum import Enum
from typing import Annotated, Optional
from fastapi import Depends, Query
-from pydantic import BaseModel, ConfigDict
+from pydantic import BaseModel, ConfigDict, NaiveDatetime
_LIMIT_DESCRIPTION = 'Maximum number of records to return. The system will not return more than 10,000 records.'
@@ -11,7 +12,8 @@
)
_APP_IDENTIFIER_DESCRIPTION = (
'base64 encoded application name and email, as in `base64("app_name:email")`. '
- 'This value can also be passed in the `X-HDX-HAPI-APP-IDENTIFIER` header. See the *encode_identifier* endpoint.'
+ 'This value can also be passed in the `X-HDX-HAPI-APP-IDENTIFIER` header. '
+ 'See the *encoded_app_identifier* endpoint.'
)
app_name_identifier_query = Query(max_length=512, min_length=4, description='A name for the calling application')
@@ -21,6 +23,7 @@
pagination_offset_query = Query(ge=0, description=_OFFSET_DESCRIPTION)
common_app_identifier_query = Query(max_length=512, description=_APP_IDENTIFIER_DESCRIPTION)
+
class PaginationParams(BaseModel):
offset: int = pagination_offset_query
limit: int = pagination_limit_query
@@ -46,6 +49,41 @@ async def common_endpoint_parameters(
return CommonEndpointParams(**pagination_parameters.model_dump(), app_identifier=app_identifier)
+class ReferencePeriodParameters(BaseModel):
+ reference_period_start_min: Optional[NaiveDatetime | date] = None
+ reference_period_start_max: Optional[NaiveDatetime | date] = None
+ reference_period_end_min: Optional[NaiveDatetime | date] = None
+ reference_period_end_max: Optional[NaiveDatetime | date] = None
+
+ model_config = ConfigDict(frozen=True)
+
+
+async def reference_period_parameters(
+ reference_period_start_min: Annotated[
+ NaiveDatetime | date,
+ Query(description='Min date of reference start date, e.g. 2020-01-01 or 2020-01-01T00:00:00'),
+ ] = None,
+ reference_period_start_max: Annotated[
+ NaiveDatetime | date,
+ Query(description='Max date of reference start date, e.g. 2020-01-01 or 2020-01-01T00:00:00'),
+ ] = None,
+ reference_period_end_min: Annotated[
+ NaiveDatetime | date,
+ Query(description='Min date of reference end date, e.g. 2020-01-01 or 2020-01-01T00:00:00'),
+ ] = None,
+ reference_period_end_max: Annotated[
+ NaiveDatetime | date,
+ Query(description='Max date of reference end date, e.g. 2020-01-01 or 2020-01-01T00:00:00'),
+ ] = None,
+) -> ReferencePeriodParameters:
+ return ReferencePeriodParameters(
+ reference_period_start_min=reference_period_start_min,
+ reference_period_start_max=reference_period_start_max,
+ reference_period_end_min=reference_period_end_min,
+ reference_period_end_max=reference_period_end_max,
+ )
+
+
class OutputFormat(str, Enum):
CSV = 'csv'
JSON = 'json'
diff --git a/hdx_hapi/services/admin1_logic.py b/hdx_hapi/services/admin1_logic.py
index 9bd61296..6f13f8a7 100644
--- a/hdx_hapi/services/admin1_logic.py
+++ b/hdx_hapi/services/admin1_logic.py
@@ -1,33 +1,24 @@
-from datetime import datetime
-
-
from sqlalchemy.ext.asyncio import AsyncSession
from hdx_hapi.db.dao.admin1_view_dao import admin1_view_list
-from hdx_hapi.endpoints.util.util import PaginationParams
+from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters
async def get_admin1_srv(
pagination_parameters: PaginationParams,
+ ref_period_parameters: ReferencePeriodParameters,
db: AsyncSession,
code: str = None,
name: str = None,
- hapi_updated_date_min: datetime = None,
- hapi_updated_date_max: datetime = None,
- hapi_replaced_date_min: datetime = None,
- hapi_replaced_date_max: datetime = None,
location_code: str = None,
location_name: str = None,
):
return await admin1_view_list(
pagination_parameters=pagination_parameters,
+ ref_period_parameters=ref_period_parameters,
db=db,
code=code,
name=name,
- hapi_updated_date_min=hapi_updated_date_min,
- hapi_updated_date_max=hapi_updated_date_max,
- hapi_replaced_date_min=hapi_replaced_date_min,
- hapi_replaced_date_max=hapi_replaced_date_max,
location_code=location_code,
location_name=location_name,
)
diff --git a/hdx_hapi/services/admin2_logic.py b/hdx_hapi/services/admin2_logic.py
index 2caee857..3575092f 100644
--- a/hdx_hapi/services/admin2_logic.py
+++ b/hdx_hapi/services/admin2_logic.py
@@ -1,19 +1,15 @@
-from datetime import datetime
-
from sqlalchemy.ext.asyncio import AsyncSession
from hdx_hapi.db.dao.admin2_view_dao import admin2_view_list
-from hdx_hapi.endpoints.util.util import PaginationParams
+from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters
+
async def get_admin2_srv(
pagination_parameters: PaginationParams,
+ ref_period_parameters: ReferencePeriodParameters,
db: AsyncSession,
code: str = None,
name: str = None,
- hapi_updated_date_min: datetime = None,
- hapi_updated_date_max: datetime = None,
- hapi_replaced_date_min: datetime = None,
- hapi_replaced_date_max: datetime = None,
admin1_code: str = None,
admin1_name: str = None,
location_code: str = None,
@@ -21,13 +17,10 @@ async def get_admin2_srv(
):
return await admin2_view_list(
pagination_parameters=pagination_parameters,
+ ref_period_parameters=ref_period_parameters,
db=db,
code=code,
name=name,
- hapi_updated_date_min=hapi_updated_date_min,
- hapi_updated_date_max=hapi_updated_date_max,
- hapi_replaced_date_min=hapi_replaced_date_min,
- hapi_replaced_date_max=hapi_replaced_date_max,
admin1_code=admin1_code,
admin1_name=admin1_name,
location_code=location_code,
diff --git a/hdx_hapi/services/admin_level_logic.py b/hdx_hapi/services/admin_level_logic.py
index 154f6bc6..1fa34770 100644
--- a/hdx_hapi/services/admin_level_logic.py
+++ b/hdx_hapi/services/admin_level_logic.py
@@ -1,7 +1,8 @@
+from typing import Optional
from hdx_hapi.endpoints.util.util import AdminLevel
-def compute_unspecified_values(admin_level: AdminLevel):
+def compute_unspecified_values(admin_level: Optional[AdminLevel]):
"""
Compute unspecified values for admin1 and admin2
"""
diff --git a/hdx_hapi/services/age_range_logic.py b/hdx_hapi/services/age_range_logic.py
deleted file mode 100644
index 19c81a2e..00000000
--- a/hdx_hapi/services/age_range_logic.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from sqlalchemy.ext.asyncio import AsyncSession
-
-from hdx_hapi.db.dao.age_range_view_dao import age_ranges_view_list
-from hdx_hapi.endpoints.util.util import PaginationParams
-
-
-async def get_age_ranges_srv(
- pagination_parameters: PaginationParams,
- db: AsyncSession,
- code: str = None,
-):
- return await age_ranges_view_list(
- pagination_parameters=pagination_parameters,
- db=db,
- code=code,
- )
diff --git a/hdx_hapi/services/conflict_view_logic.py b/hdx_hapi/services/conflict_view_logic.py
new file mode 100644
index 00000000..c46cc722
--- /dev/null
+++ b/hdx_hapi/services/conflict_view_logic.py
@@ -0,0 +1,45 @@
+from typing import Optional, Sequence
+from hapi_schema.utils.enums import EventType
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from hdx_hapi.db.dao.conflict_event_view_dao import conflict_event_view_list
+from hdx_hapi.db.models.views.all_views import ConflictEventView
+from hdx_hapi.endpoints.util.util import AdminLevel, PaginationParams, ReferencePeriodParameters
+from hdx_hapi.services.admin_level_logic import compute_unspecified_values
+
+
+async def get_conflict_event_srv(
+ pagination_parameters: PaginationParams,
+ ref_period_parameters: ReferencePeriodParameters,
+ db: AsyncSession,
+ event_type: Optional[EventType] = None,
+ location_ref: Optional[int] = None,
+ location_code: Optional[str] = None,
+ location_name: Optional[str] = None,
+ admin1_ref: Optional[int] = None,
+ admin1_code: Optional[str] = None,
+ admin1_name: Optional[str] = None,
+ admin2_ref: Optional[int] = None,
+ admin2_code: Optional[str] = None,
+ admin2_name: Optional[str] = None,
+ admin_level: Optional[AdminLevel] = None,
+) -> Sequence[ConflictEventView]:
+ admin1_is_unspecified, admin2_is_unspecified = compute_unspecified_values(admin_level)
+
+ return await conflict_event_view_list(
+ pagination_parameters=pagination_parameters,
+ ref_period_parameters=ref_period_parameters,
+ db=db,
+ event_type=event_type,
+ location_ref=location_ref,
+ location_code=location_code,
+ location_name=location_name,
+ admin1_ref=admin1_ref,
+ admin1_code=admin1_code,
+ admin1_name=admin1_name,
+ admin1_is_unspecified=admin1_is_unspecified,
+ admin2_ref=admin2_ref,
+ admin2_code=admin2_code,
+ admin2_name=admin2_name,
+ admin2_is_unspecified=admin2_is_unspecified,
+ )
diff --git a/hdx_hapi/services/csv_transform_logic.py b/hdx_hapi/services/csv_transform_logic.py
index 72efd5f7..85113458 100644
--- a/hdx_hapi/services/csv_transform_logic.py
+++ b/hdx_hapi/services/csv_transform_logic.py
@@ -1,7 +1,7 @@
import csv
import io
-from typing import Dict, List, Type
+from typing import Dict, Sequence, Type
from fastapi.responses import StreamingResponse
from hdx_hapi.endpoints.models.base import HapiBaseModel
@@ -13,8 +13,8 @@
def transform_result_to_csv_stream_if_requested(
- result: List[Dict], output_format: OutputFormat, pydantic_class: Type[HapiBaseModel]
-) -> List[Dict] | StreamingResponse:
+ result: Sequence[Dict], output_format: OutputFormat, pydantic_class: Type[HapiBaseModel]
+) -> Dict[str, Sequence] | StreamingResponse:
"""
Transforms the result to a CSV stream if requested. Otherwise, returns the result as is
"""
diff --git a/hdx_hapi/services/funding_logic.py b/hdx_hapi/services/funding_logic.py
new file mode 100644
index 00000000..b0db1c9b
--- /dev/null
+++ b/hdx_hapi/services/funding_logic.py
@@ -0,0 +1,34 @@
+from typing import Optional, Sequence
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from hdx_hapi.db.dao.funding_view_dao import funding_view_list
+from hdx_hapi.db.models.views.all_views import FundingView
+from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters
+
+
+async def get_funding_srv(
+ pagination_parameters: PaginationParams,
+ ref_period_parameters: ReferencePeriodParameters,
+ db: AsyncSession,
+ appeal_code: Optional[str] = None,
+ appeal_type: Optional[str] = None,
+ org_acronym: Optional[str] = None,
+ org_name: Optional[str] = None,
+ sector_name: Optional[str] = None,
+ # location_ref: Optional[int] = None,
+ location_code: Optional[str] = None,
+ location_name: Optional[str] = None,
+) -> Sequence[FundingView]:
+
+ return await funding_view_list(
+ pagination_parameters=pagination_parameters,
+ ref_period_parameters=ref_period_parameters,
+ db=db,
+ appeal_code=appeal_code,
+ appeal_type=appeal_type,
+ org_acronym=org_acronym,
+ org_name=org_name,
+ sector_name=sector_name,
+ location_code=location_code,
+ location_name=location_name,
+ )
diff --git a/hdx_hapi/services/gender_logic.py b/hdx_hapi/services/gender_logic.py
deleted file mode 100644
index c2b2eb45..00000000
--- a/hdx_hapi/services/gender_logic.py
+++ /dev/null
@@ -1,17 +0,0 @@
-from sqlalchemy.ext.asyncio import AsyncSession
-
-from hdx_hapi.db.dao.gender_view_dao import genders_view_list
-from hdx_hapi.endpoints.util.util import PaginationParams
-
-async def get_genders_srv(
- pagination_parameters: PaginationParams,
- db: AsyncSession,
- code: str = None,
- description: str = None
-):
- return await genders_view_list(
- pagination_parameters=pagination_parameters,
- db=db,
- code=code,
- description=description
- )
diff --git a/hdx_hapi/services/hdx_url_logic.py b/hdx_hapi/services/hdx_url_logic.py
index 041319c2..f2696a25 100644
--- a/hdx_hapi/services/hdx_url_logic.py
+++ b/hdx_hapi/services/hdx_url_logic.py
@@ -7,73 +7,76 @@
CONFIG = get_config()
+
def get_dataset_url(dataset_id: str) -> HttpUrl:
"""Creates the full HDX URL for a dataset
-
+
Args:
- context (Context):
+ context (Context):
dataset_id (str): Dataset id or name
Returns:
str: HDX URL for the specified dataset
- """
+ """
domain = CONFIG.HDX_DOMAIN
dataset_url = CONFIG.HDX_DATASET_URL
if not domain:
logger.warning('HDX_DOMAIN environment variable is not set.')
- url=dataset_url.format(domain=domain, dataset_id=dataset_id)
+ url = dataset_url.format(domain=domain, dataset_id=dataset_id)
return HttpUrl(url=url)
+
def get_dataset_api_url(dataset_id: str) -> HttpUrl:
"""Creates the full HDX API URL for a dataset
-
+
Args:
- context (Context):
+ context (Context):
dataset_id (str): Dataset id or name
Returns:
str: HDX API URL for the specified dataset (package_show)
- """
+ """
domain = CONFIG.HDX_DOMAIN
dataset_api_url = CONFIG.HDX_DATASET_API_URL
if not domain:
logger.warning('HDX_DOMAIN environment variable is not set.')
- url=dataset_api_url.format(domain=domain, dataset_id=dataset_id)
+ url = dataset_api_url.format(domain=domain, dataset_id=dataset_id)
return HttpUrl(url=url)
def get_resource_url(dataset_id: str, resource_id: str) -> HttpUrl:
"""Creates the full HDX URL for a dataset
-
+
Args:
- context (Context):
+ context (Context):
dataset_id (str): Dataset id or name
Returns:
str: HDX URL for the specified dataset
- """
+ """
domain = CONFIG.HDX_DOMAIN
resource_url = CONFIG.HDX_RESOURCE_URL
if not domain:
logger.warning('HDX_DOMAIN environment variable is not set.')
- url=resource_url.format(domain=domain, dataset_id=dataset_id, resource_id=resource_id)
+ url = resource_url.format(domain=domain, dataset_id=dataset_id, resource_id=resource_id)
return HttpUrl(url=url)
+
def get_resource_api_url(resource_id: str) -> HttpUrl:
"""Creates the full HDX API URL for a dataset
-
+
Args:
- context (Context):
+ context (Context):
dataset_id (str): Dataset id or name
Returns:
str: HDX API URL for the specified dataset (package_show)
- """
+ """
domain = CONFIG.HDX_DOMAIN
resource_api_url = CONFIG.HDX_RESOURCE_API_URL
if not domain:
logger.warning('HDX_DOMAIN environment variable is not set.')
- url=resource_api_url.format(domain=domain, resource_id=resource_id)
+ url = resource_api_url.format(domain=domain, resource_id=resource_id)
return HttpUrl(url=url)
@@ -81,16 +84,34 @@ def get_organization_url(org_id: str) -> HttpUrl:
"""Creates the full HDX URL for an organization
Args:
- context (Context):
+ context (Context):
org_id (str): Organization id or name
Returns:
str: HDX URL for the specified organization
- """
+ """
domain = CONFIG.HDX_DOMAIN
organization_url = CONFIG.HDX_ORGANIZATION_URL
if not domain:
logger.warning('HDX_DOMAIN environment variable is not set.')
-
+
url = organization_url.format(domain=domain, org_id=org_id)
- return HttpUrl(url=url)
\ No newline at end of file
+ return HttpUrl(url=url)
+
+
+def get_organization_api_url(org_id: str) -> HttpUrl:
+ """Creates the full HDX API URL for an organization
+
+ Args:
+ context (Context):
+ org_id (str): Organization id or name
+ Returns:
+        str: HDX API URL for the specified organization (organization_show)
+ """
+ domain = CONFIG.HDX_DOMAIN
+ organization_api_url = CONFIG.HDX_ORGANIZATION_API_URL
+ if not domain:
+ logger.warning('HDX_DOMAIN environment variable is not set.')
+
+ url = organization_api_url.format(domain=domain, org_id=org_id)
+ return HttpUrl(url=url)
diff --git a/hdx_hapi/services/humanitarian_needs_logic.py b/hdx_hapi/services/humanitarian_needs_logic.py
index bcf52e1f..1d3f8c2f 100644
--- a/hdx_hapi/services/humanitarian_needs_logic.py
+++ b/hdx_hapi/services/humanitarian_needs_logic.py
@@ -1,68 +1,66 @@
-from datetime import datetime
-
+# from datetime import datetime
+from typing import Optional, Sequence
from sqlalchemy.ext.asyncio import AsyncSession
+from hdx_hapi.db.models.views.all_views import HumanitarianNeedsView
from hdx_hapi.db.dao.humanitarian_needs_view_dao import humanitarian_needs_view_list
-from hdx_hapi.endpoints.util.util import AdminLevel, PaginationParams
from hdx_hapi.services.admin_level_logic import compute_unspecified_values
+from hdx_hapi.endpoints.util.util import AdminLevel, PaginationParams, ReferencePeriodParameters
+from hapi_schema.utils.enums import DisabledMarker, Gender, PopulationGroup, PopulationStatus
async def get_humanitarian_needs_srv(
pagination_parameters: PaginationParams,
+ ref_period_parameters: ReferencePeriodParameters,
db: AsyncSession,
- gender_code: str = None,
- age_range_code: str = None,
- disabled_marker: bool = None,
- sector_code: str = None,
- sector_name: str = None,
- population_group_code: str = None,
- population_status_code: str = None,
- population: int = None,
- dataset_hdx_provider_stub: str = None,
- resource_update_date_min=None,
- resource_update_date_max=None,
- hapi_updated_date_min: datetime = None,
- hapi_updated_date_max: datetime = None,
- hapi_replaced_date_min: datetime = None,
- hapi_replaced_date_max: datetime = None,
- location_code: str = None,
- location_name: str = None,
- admin1_code: str = None,
- # admin1_name: str = None,
- location_ref: int = None,
- admin2_code: str = None,
- admin2_name: str = None,
- admin1_ref: int = None,
- admin_level: AdminLevel = None,
-):
+ admin2_ref: Optional[int] = None,
+ gender: Optional[Gender] = None,
+ age_range: Optional[str] = None,
+ min_age: Optional[int] = None,
+ max_age: Optional[int] = None,
+ disabled_marker: Optional[DisabledMarker] = None,
+ sector_code: Optional[str] = None,
+ population_group: Optional[PopulationGroup] = None,
+ population_status: Optional[PopulationStatus] = None,
+ population: Optional[int] = None,
+ sector_name: Optional[str] = None,
+ location_code: Optional[str] = None,
+ location_name: Optional[str] = None,
+ location_ref: Optional[int] = None,
+ admin1_code: Optional[str] = None,
+ admin2_code: Optional[str] = None,
+ admin2_name: Optional[str] = None,
+ admin1_ref: Optional[int] = None,
+ admin1_name: Optional[str] = None,
+ admin1_is_unspecified: Optional[bool] = None,
+ admin2_is_unspecified: Optional[bool] = None,
+ admin_level: Optional[AdminLevel] = None,
+) -> Sequence[HumanitarianNeedsView]:
admin1_is_unspecified, admin2_is_unspecified = compute_unspecified_values(admin_level)
return await humanitarian_needs_view_list(
pagination_parameters=pagination_parameters,
+ ref_period_parameters=ref_period_parameters,
db=db,
- gender_code=gender_code,
- age_range_code=age_range_code,
+ admin2_ref=admin2_ref,
+ gender=gender,
+ age_range=age_range,
+ min_age=min_age,
+ max_age=max_age,
disabled_marker=disabled_marker,
sector_code=sector_code,
- sector_name=sector_name,
- population_group_code=population_group_code,
- population_status_code=population_status_code,
+ population_group=population_group,
+ population_status=population_status,
population=population,
- dataset_hdx_provider_stub=dataset_hdx_provider_stub,
- resource_update_date_min=resource_update_date_min,
- resource_update_date_max=resource_update_date_max,
- hapi_updated_date_min=hapi_updated_date_min,
- hapi_updated_date_max=hapi_updated_date_max,
- hapi_replaced_date_min=hapi_replaced_date_min,
- hapi_replaced_date_max=hapi_replaced_date_max,
+ sector_name=sector_name,
location_code=location_code,
location_name=location_name,
- admin1_code=admin1_code,
- # admin1_name=admin1_name,
- admin1_is_unspecified=admin1_is_unspecified,
location_ref=location_ref,
+ admin1_code=admin1_code,
admin2_code=admin2_code,
admin2_name=admin2_name,
- admin2_is_unspecified=admin2_is_unspecified,
admin1_ref=admin1_ref,
+ admin1_name=admin1_name,
+ admin1_is_unspecified=admin1_is_unspecified,
+ admin2_is_unspecified=admin2_is_unspecified,
)
diff --git a/hdx_hapi/services/location_logic.py b/hdx_hapi/services/location_logic.py
index 8d8972db..407dcf9f 100644
--- a/hdx_hapi/services/location_logic.py
+++ b/hdx_hapi/services/location_logic.py
@@ -1,28 +1,20 @@
-from datetime import datetime
-
from sqlalchemy.ext.asyncio import AsyncSession
from hdx_hapi.db.dao.location_view_dao import locations_view_list
-from hdx_hapi.endpoints.util.util import PaginationParams
+from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters
async def get_locations_srv(
pagination_parameters: PaginationParams,
+ ref_period_parameters: ReferencePeriodParameters,
db: AsyncSession,
code: str = None,
name: str = None,
- hapi_updated_date_min: datetime = None,
- hapi_updated_date_max: datetime = None,
- hapi_replaced_date_min: datetime = None,
- hapi_replaced_date_max: datetime = None,
):
return await locations_view_list(
pagination_parameters=pagination_parameters,
+ ref_period_parameters=ref_period_parameters,
db=db,
code=code,
name=name,
- hapi_updated_date_min=hapi_updated_date_min,
- hapi_updated_date_max=hapi_updated_date_max,
- hapi_replaced_date_min=hapi_replaced_date_min,
- hapi_replaced_date_max=hapi_replaced_date_max,
)
diff --git a/hdx_hapi/services/national_risk_logic.py b/hdx_hapi/services/national_risk_logic.py
index a8158b8a..5f696dc7 100644
--- a/hdx_hapi/services/national_risk_logic.py
+++ b/hdx_hapi/services/national_risk_logic.py
@@ -1,48 +1,45 @@
-from datetime import datetime
+from typing import Optional
+from hapi_schema.utils.enums import RiskClass
from sqlalchemy.ext.asyncio import AsyncSession
from hdx_hapi.db.dao.national_risk_view_dao import national_risks_view_list
-from hdx_hapi.endpoints.util.util import PaginationParams
+from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters
async def get_national_risks_srv(
pagination_parameters: PaginationParams,
+ ref_period_parameters: ReferencePeriodParameters,
db: AsyncSession,
- risk_class: int = None,
- global_rank: int = None,
- overall_risk: float = None,
- hazard_exposure_risk: float = None,
- vulnerability_risk: float = None,
- coping_capacity_risk: float = None,
- dataset_hdx_provider_stub: str = None,
- resource_update_date_min=None,
- resource_update_date_max=None,
- hapi_updated_date_min: datetime = None,
- hapi_updated_date_max: datetime = None,
- hapi_replaced_date_min: datetime = None,
- hapi_replaced_date_max: datetime = None,
- # sector_name: str = None,
- location_code: str = None,
- location_name: str = None,
+ risk_class: Optional[RiskClass] = None,
+ global_rank_min: Optional[int] = None,
+ global_rank_max: Optional[int] = None,
+ overall_risk_min: Optional[float] = None,
+ overall_risk_max: Optional[float] = None,
+ hazard_exposure_risk_min: Optional[float] = None,
+ hazard_exposure_risk_max: Optional[float] = None,
+ vulnerability_risk_min: Optional[float] = None,
+ vulnerability_risk_max: Optional[float] = None,
+ coping_capacity_risk_min: Optional[float] = None,
+ coping_capacity_risk_max: Optional[float] = None,
+ location_code: Optional[str] = None,
+ location_name: Optional[str] = None,
):
return await national_risks_view_list(
pagination_parameters=pagination_parameters,
+ ref_period_parameters=ref_period_parameters,
db=db,
risk_class=risk_class,
- global_rank=global_rank,
- overall_risk=overall_risk,
- hazard_exposure_risk=hazard_exposure_risk,
- vulnerability_risk=vulnerability_risk,
- coping_capacity_risk=coping_capacity_risk,
- dataset_hdx_provider_stub=dataset_hdx_provider_stub,
- resource_update_date_min=resource_update_date_min,
- resource_update_date_max=resource_update_date_max,
- hapi_updated_date_min=hapi_updated_date_min,
- hapi_updated_date_max=hapi_updated_date_max,
- hapi_replaced_date_min=hapi_replaced_date_min,
- hapi_replaced_date_max=hapi_replaced_date_max,
- # sector_name=sector_name,
+ global_rank_min=global_rank_min,
+ global_rank_max=global_rank_max,
+ overall_risk_min=overall_risk_min,
+ overall_risk_max=overall_risk_max,
+ hazard_exposure_risk_min=hazard_exposure_risk_min,
+ hazard_exposure_risk_max=hazard_exposure_risk_max,
+ vulnerability_risk_min=vulnerability_risk_min,
+ vulnerability_risk_max=vulnerability_risk_max,
+ coping_capacity_risk_min=coping_capacity_risk_min,
+ coping_capacity_risk_max=coping_capacity_risk_max,
location_code=location_code,
location_name=location_name,
)
diff --git a/hdx_hapi/services/operational_presence_logic.py b/hdx_hapi/services/operational_presence_logic.py
index c2182ed2..dfba98dc 100644
--- a/hdx_hapi/services/operational_presence_logic.py
+++ b/hdx_hapi/services/operational_presence_logic.py
@@ -1,63 +1,49 @@
-from datetime import datetime
-
+from typing import Optional
from sqlalchemy.ext.asyncio import AsyncSession
from hdx_hapi.db.dao.operational_presence_view_dao import operational_presences_view_list
-from hdx_hapi.endpoints.util.util import AdminLevel, PaginationParams
+from hdx_hapi.endpoints.util.util import AdminLevel, PaginationParams, ReferencePeriodParameters
from hdx_hapi.services.admin_level_logic import compute_unspecified_values
async def get_operational_presences_srv(
pagination_parameters: PaginationParams,
+ ref_period_parameters: ReferencePeriodParameters,
db: AsyncSession,
- sector_code: str = None,
- dataset_hdx_provider_stub: str = None,
- resource_update_date_min=None,
- resource_update_date_max=None,
- hapi_updated_date_min: datetime = None,
- hapi_updated_date_max: datetime = None,
- hapi_replaced_date_min: datetime = None,
- hapi_replaced_date_max: datetime = None,
- org_acronym: str = None,
- org_name: str = None,
- sector_name: str = None,
- location_code: str = None,
- location_name: str = None,
- admin1_code: str = None,
- admin1_name: str = None,
- location_ref: int = None,
- # admin1_is_unspecified=None,
- admin2_code: str = None,
- admin2_name: str = None,
- admin1_ref: int = None,
- admin_level: AdminLevel = None,
- # admin2_is_unspecified=None,
+ sector_code: Optional[str] = None,
+ org_acronym: Optional[str] = None,
+ org_name: Optional[str] = None,
+ sector_name: Optional[str] = None,
+ location_ref: Optional[int] = None,
+ location_code: Optional[str] = None,
+ location_name: Optional[str] = None,
+ admin1_ref: Optional[int] = None,
+ admin1_code: Optional[str] = None,
+ admin1_name: Optional[str] = None,
+ admin2_ref: Optional[int] = None,
+ admin2_code: Optional[str] = None,
+ admin2_name: Optional[str] = None,
+ admin_level: Optional[AdminLevel] = None,
):
-
admin1_is_unspecified, admin2_is_unspecified = compute_unspecified_values(admin_level)
return await operational_presences_view_list(
pagination_parameters=pagination_parameters,
+ ref_period_parameters=ref_period_parameters,
db=db,
sector_code=sector_code,
- dataset_hdx_provider_stub=dataset_hdx_provider_stub,
- resource_update_date_min=resource_update_date_min,
- resource_update_date_max=resource_update_date_max,
- hapi_updated_date_min=hapi_updated_date_min,
- hapi_updated_date_max=hapi_updated_date_max,
- hapi_replaced_date_min=hapi_replaced_date_min,
- hapi_replaced_date_max=hapi_replaced_date_max,
org_acronym=org_acronym,
org_name=org_name,
sector_name=sector_name,
+ location_ref=location_ref,
location_code=location_code,
location_name=location_name,
+ admin1_ref=admin1_ref,
admin1_code=admin1_code,
admin1_name=admin1_name,
admin1_is_unspecified=admin1_is_unspecified,
- location_ref=location_ref,
+ admin2_ref=admin2_ref,
admin2_code=admin2_code,
admin2_name=admin2_name,
admin2_is_unspecified=admin2_is_unspecified,
- admin1_ref=admin1_ref,
)
diff --git a/hdx_hapi/services/population_group_logic.py b/hdx_hapi/services/population_group_logic.py
deleted file mode 100644
index 26a3a2ae..00000000
--- a/hdx_hapi/services/population_group_logic.py
+++ /dev/null
@@ -1,19 +0,0 @@
-
-from sqlalchemy.ext.asyncio import AsyncSession
-
-from hdx_hapi.db.dao.population_group_view_dao import population_groups_view_list
-from hdx_hapi.endpoints.util.util import PaginationParams
-
-
-async def get_population_groups_srv(
- pagination_parameters: PaginationParams,
- db: AsyncSession,
- code: str = None,
- description: str = None,
-):
- return await population_groups_view_list(
- pagination_parameters=pagination_parameters,
- db=db,
- code=code,
- description=description,
- )
diff --git a/hdx_hapi/services/population_logic.py b/hdx_hapi/services/population_logic.py
index 03e5a7f8..fed835b0 100644
--- a/hdx_hapi/services/population_logic.py
+++ b/hdx_hapi/services/population_logic.py
@@ -1,34 +1,31 @@
-from datetime import datetime
-
from sqlalchemy.ext.asyncio import AsyncSession
+from hapi_schema.utils.enums import Gender
+
from hdx_hapi.db.dao.population_view_dao import populations_view_list
-from hdx_hapi.endpoints.util.util import AdminLevel, PaginationParams
+from hdx_hapi.endpoints.util.util import AdminLevel, CommonEndpointParams, ReferencePeriodParameters
from hdx_hapi.services.admin_level_logic import compute_unspecified_values
async def get_populations_srv(
- pagination_parameters: PaginationParams,
+ ref_period_parameters: ReferencePeriodParameters,
+ pagination_parameters: CommonEndpointParams,
db: AsyncSession,
- gender_code: str = None,
- age_range_code: str = None,
+ gender: Gender = None,
+ age_range: str = None,
+ min_age: int = None,
+ max_age: int = None,
population: int = None,
- dataset_hdx_provider_stub: str = None,
- resource_update_date_min=None,
- resource_update_date_max=None,
- hapi_updated_date_min: datetime = None,
- hapi_updated_date_max: datetime = None,
- hapi_replaced_date_min: datetime = None,
- hapi_replaced_date_max: datetime = None,
+ location_ref: int = None,
location_code: str = None,
location_name: str = None,
+ admin1_ref: int = None,
admin1_name: str = None,
admin1_code: str = None,
- location_ref: int = None,
# admin1_is_unspecified: bool = None,
+ admin2_ref: int = None,
admin2_code: str = None,
admin2_name: str = None,
- admin1_ref: int = None,
admin_level: AdminLevel = None,
# admin2_is_unspecified: bool = None,
):
@@ -36,25 +33,22 @@ async def get_populations_srv(
return await populations_view_list(
pagination_parameters=pagination_parameters,
+ ref_period_parameters=ref_period_parameters,
db=db,
- gender_code=gender_code,
- age_range_code=age_range_code,
+ gender=gender,
+ age_range=age_range,
+ min_age=min_age,
+ max_age=max_age,
population=population,
- dataset_hdx_provider_stub=dataset_hdx_provider_stub,
- resource_update_date_min=resource_update_date_min,
- resource_update_date_max=resource_update_date_max,
- hapi_updated_date_min=hapi_updated_date_min,
- hapi_updated_date_max=hapi_updated_date_max,
- hapi_replaced_date_min=hapi_replaced_date_min,
- hapi_replaced_date_max=hapi_replaced_date_max,
+ admin1_ref=admin1_ref,
+ location_ref=location_ref,
location_code=location_code,
location_name=location_name,
admin1_name=admin1_name,
admin1_code=admin1_code,
- admin1_is_unspecified=admin1_is_unspecified,
- location_ref=location_ref,
- admin2_code=admin2_code,
+ admin2_ref=admin2_ref,
admin2_name=admin2_name,
+ admin2_code=admin2_code,
+ admin1_is_unspecified=admin1_is_unspecified,
admin2_is_unspecified=admin2_is_unspecified,
- admin1_ref=admin1_ref,
)
diff --git a/hdx_hapi/services/population_status_logic.py b/hdx_hapi/services/population_status_logic.py
deleted file mode 100644
index 2f109261..00000000
--- a/hdx_hapi/services/population_status_logic.py
+++ /dev/null
@@ -1,18 +0,0 @@
-from sqlalchemy.ext.asyncio import AsyncSession
-
-from hdx_hapi.db.dao.population_status_view_dao import population_statuses_view_list
-from hdx_hapi.endpoints.util.util import PaginationParams
-
-
-async def get_population_statuses_srv(
- pagination_parameters: PaginationParams,
- db: AsyncSession,
- code: str = None,
- description: str = None,
-):
- return await population_statuses_view_list(
- pagination_parameters=pagination_parameters,
- db=db,
- code=code,
- description=description,
- )
diff --git a/hdx_hapi/services/refugees_logic.py b/hdx_hapi/services/refugees_logic.py
new file mode 100644
index 00000000..faaefbde
--- /dev/null
+++ b/hdx_hapi/services/refugees_logic.py
@@ -0,0 +1,37 @@
+from typing import Optional, Sequence
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from hdx_hapi.db.models.views.all_views import RefugeesView
+from hdx_hapi.db.dao.refugees_view_dao import refugees_view_list
+from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters
+from hapi_schema.utils.enums import Gender, PopulationGroup
+
+
+async def get_refugees_srv(
+ pagination_parameters: PaginationParams,
+ ref_period_parameters: ReferencePeriodParameters,
+ db: AsyncSession,
+ population_group: Optional[PopulationGroup] = None,
+ gender: Optional[Gender] = None,
+ age_range: Optional[str] = None,
+ min_age: Optional[int] = None,
+ max_age: Optional[int] = None,
+ origin_location_code: Optional[str] = None,
+ origin_location_name: Optional[str] = None,
+ asylum_location_code: Optional[str] = None,
+ asylum_location_name: Optional[str] = None,
+) -> Sequence[RefugeesView]:
+ return await refugees_view_list(
+ pagination_parameters=pagination_parameters,
+ ref_period_parameters=ref_period_parameters,
+ db=db,
+ population_group=population_group,
+ gender=gender,
+ age_range=age_range,
+ min_age=min_age,
+ max_age=max_age,
+ origin_location_code=origin_location_code,
+ origin_location_name=origin_location_name,
+ asylum_location_code=asylum_location_code,
+ asylum_location_name=asylum_location_name,
+ )
diff --git a/hdx_hapi/services/resource_logic.py b/hdx_hapi/services/resource_logic.py
index 738f6122..19f71aed 100644
--- a/hdx_hapi/services/resource_logic.py
+++ b/hdx_hapi/services/resource_logic.py
@@ -15,8 +15,6 @@ async def get_resources_srv(
is_hxl: bool = None,
hapi_updated_date_min: datetime = None,
hapi_updated_date_max: datetime = None,
- hapi_replaced_date_min: datetime = None,
- hapi_replaced_date_max: datetime = None,
dataset_title: str = None,
dataset_hdx_id: str = None,
dataset_hdx_stub: str = None,
@@ -33,8 +31,6 @@ async def get_resources_srv(
is_hxl=is_hxl,
hapi_updated_date_min=hapi_updated_date_min,
hapi_updated_date_max=hapi_updated_date_max,
- hapi_replaced_date_min=hapi_replaced_date_min,
- hapi_replaced_date_max=hapi_replaced_date_max,
dataset_hdx_id=dataset_hdx_id,
dataset_hdx_stub=dataset_hdx_stub,
dataset_title=dataset_title,
diff --git a/main.py b/main.py
index a8200fee..ee68813f 100644
--- a/main.py
+++ b/main.py
@@ -15,16 +15,19 @@
from hdx_hapi.endpoints.get_encoded_identifier import router as encoded_identifier_router # noqa
from hdx_hapi.endpoints.favicon import router as favicon_router # noqa
+
from hdx_hapi.endpoints.get_population import router as population_router # noqa
from hdx_hapi.endpoints.get_operational_presence import router as operational_presence_router # noqa
+from hdx_hapi.endpoints.get_funding import router as funding_router # noqa
+from hdx_hapi.endpoints.get_conflict_events import router as conflict_events_router # noqa
from hdx_hapi.endpoints.get_admin_level import router as admin_level_router # noqa
from hdx_hapi.endpoints.get_hdx_metadata import router as dataset_router # noqa
from hdx_hapi.endpoints.get_humanitarian_response import router as humanitarian_response_router # noqa
-from hdx_hapi.endpoints.get_demographic import router as demographic_router # noqa
-from hdx_hapi.endpoints.get_food_security import router as food_security_router # noqa
+from hdx_hapi.endpoints.get_affected_people import router as affected_people_router # noqa
from hdx_hapi.endpoints.get_national_risk import router as national_risk_router # noqa
-from hdx_hapi.endpoints.get_humanitarian_needs import router as humanitarian_needs_router # noqa
-from hdx_hapi.endpoints.get_population_profile import router as population_profile_router # noqa
+# from hdx_hapi.endpoints.get_demographic import router as demographic_router # noqa
+# from hdx_hapi.endpoints.get_food_security import router as food_security_router # noqa
+# from hdx_hapi.endpoints.get_population_profile import router as population_profile_router # noqa
# from hdx_hapi.endpoints.delete_example import delete_dataset
@@ -47,15 +50,18 @@
app.include_router(encoded_identifier_router)
app.include_router(favicon_router)
app.include_router(operational_presence_router)
+app.include_router(funding_router)
+app.include_router(conflict_events_router)
app.include_router(population_router)
-app.include_router(food_security_router)
+app.include_router(affected_people_router)
app.include_router(national_risk_router)
-app.include_router(humanitarian_needs_router)
app.include_router(admin_level_router)
app.include_router(humanitarian_response_router)
-app.include_router(demographic_router)
-app.include_router(population_profile_router)
app.include_router(dataset_router)
+# app.include_router(demographic_router)
+# app.include_router(food_security_router)
+# app.include_router(population_profile_router)
+
# add middleware
@app.middleware('http')
@@ -63,6 +69,7 @@ async def app_identifier_middleware_init(request: Request, call_next):
response = await app_identifier_middleware(request, call_next)
return response
+
# add middleware
@app.middleware('http')
async def mixpanel_tracking_middleware_init(request: Request, call_next):
diff --git a/pyproject.toml b/pyproject.toml
index 70440a3d..afb0ac4f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -12,6 +12,7 @@ select = [
"E",
"F",
"Q",
+ "INP001", # Checks for packages that are missing an __init__.py file.
]
extend-ignore = [
"Q003", # avoidable-escaped-quote
diff --git a/requirements.txt b/requirements.txt
index d66296b9..bbd22db9 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -10,4 +10,4 @@ ua-parser==0.18.0
alembic~=1.12.00
psycopg2~=2.9.7
--e git+https://github.com/OCHA-DAP/hapi-sqlalchemy-schema@v0.7.3#egg=hapi-schema
+-e git+https://github.com/OCHA-DAP/hapi-sqlalchemy-schema@v0.8.6#egg=hapi-schema
diff --git a/tests/conftest.py b/tests/conftest.py
index db8460db..d75ed7c9 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -5,12 +5,64 @@
from logging import Logger
-from sqlalchemy import create_engine, text
+from sqlalchemy import Engine, MetaData, create_engine, inspect, text
from sqlalchemy.orm import sessionmaker, Session
from typing import List
-
-SAMPLE_DATA_SQL_FILE = 'tests/sample_data.sql'
+from hapi_schema.db_admin1 import view_params_admin1
+from hapi_schema.db_admin2 import view_params_admin2
+from hapi_schema.db_dataset import view_params_dataset
+from hapi_schema.db_food_security import view_params_food_security
+from hapi_schema.db_funding import view_params_funding
+from hapi_schema.db_humanitarian_needs import view_params_humanitarian_needs
+from hapi_schema.db_location import view_params_location
+from hapi_schema.db_national_risk import view_params_national_risk
+from hapi_schema.db_operational_presence import view_params_operational_presence
+from hapi_schema.db_org_type import view_params_org_type
+from hapi_schema.db_org import view_params_org
+from hapi_schema.db_population import view_params_population
+from hapi_schema.db_refugees import view_params_refugees
+from hapi_schema.db_resource import view_params_resource
+from hapi_schema.db_sector import view_params_sector
+from hapi_schema.db_conflict_event import view_params_conflict_event
+
+from hdx_hapi.config.config import get_config
+from hdx_hapi.db.models.base import Base
+from hdx_hapi.db.models.views.util.util import CreateView
+
+SAMPLE_DATA_SQL_FILES = [
+ 'tests/sample_data/location_admin.sql',
+ 'tests/sample_data/sector.sql',
+ 'tests/sample_data/org_type.sql',
+ 'tests/sample_data/org.sql',
+ 'tests/sample_data/dataset_resource.sql',
+ 'tests/sample_data/population.sql',
+ 'tests/sample_data/operational_presence.sql',
+ 'tests/sample_data/funding.sql',
+ 'tests/sample_data/conflict_event.sql',
+ 'tests/sample_data/national_risk.sql',
+ 'tests/sample_data/humanitarian_needs.sql',
+ 'tests/sample_data/refugees.sql',
+]
+
+VIEW_LIST = [
+ view_params_admin1,
+ view_params_admin2,
+ view_params_location,
+ view_params_dataset,
+ view_params_food_security,
+ view_params_funding,
+ view_params_humanitarian_needs,
+ view_params_national_risk,
+ view_params_operational_presence,
+ view_params_org_type,
+ view_params_org,
+ view_params_population,
+ view_params_refugees,
+ view_params_resource,
+ view_params_sector,
+ view_params_conflict_event,
+]
def pytest_sessionstart(session):
@@ -18,6 +70,35 @@ def pytest_sessionstart(session):
os.environ['HAPI_IDENTIFIER_FILTERING'] = 'False'
os.environ['HDX_MIXPANEL_TOKEN'] = 'fake_token'
+ engine = create_engine(
+ get_config().SQL_ALCHEMY_PSYCOPG2_DB_URI,
+ )
+ _drop_tables_and_views(engine)
+ _create_tables_and_views(engine)
+
+
+def _create_tables_and_views(engine: Engine):
+ Base.metadata.create_all(engine)
+ with engine.connect() as conn:
+ for v in VIEW_LIST:
+ conn.execute(CreateView(v.name, v.selectable))
+ conn.commit()
+
+
+def _drop_tables_and_views(engine: Engine):
+ # drop views
+ inspector = inspect(engine)
+ views = inspector.get_view_names()
+ with engine.connect() as conn:
+ for view in views:
+ conn.execute(text(f'DROP VIEW IF EXISTS {view}'))
+ conn.commit()
+
+ # drop tables
+ metadata = MetaData()
+ metadata.reflect(bind=engine)
+ metadata.drop_all(bind=engine)
+
@pytest.fixture(scope='session')
def event_loop():
@@ -33,10 +114,9 @@ def log():
@pytest.fixture(scope='session')
def session_maker() -> sessionmaker[Session]:
-
# we don't want to import get_config before env vars are set for tests in pytest_sessionstart method
from hdx_hapi.config.config import get_config
-
+
engine = create_engine(
get_config().SQL_ALCHEMY_PSYCOPG2_DB_URI,
)
@@ -46,7 +126,7 @@ def session_maker() -> sessionmaker[Session]:
@pytest.fixture(scope='session')
def list_of_db_tables(log: Logger, session_maker: sessionmaker[Session]) -> List[str]:
- # log.info('Getting list of db tables')
+ log.info('Getting list of db tables')
session = session_maker()
try:
result = session.execute(text("SELECT tablename FROM pg_tables WHERE schemaname = 'public'"))
@@ -78,11 +158,13 @@ def populate_test_data(log: Logger, session_maker: sessionmaker[Session]):
log.info('Populating with test data')
db_session = session_maker()
try:
- with open(SAMPLE_DATA_SQL_FILE, 'r') as file:
- sql_commands = file.read()
- db_session.execute(text(sql_commands))
- db_session.commit()
- log.info('Test data inserted successfully')
+ for sample_file in SAMPLE_DATA_SQL_FILES:
+ log.info(f'Starting data insert from {sample_file}')
+ with open(sample_file, 'r') as file:
+ sql_commands = file.read()
+ db_session.execute(text(sql_commands))
+ db_session.commit()
+ log.info(f'Test data inserted successfully from {sample_file}')
except Exception as e:
log.error(f'Error while inserting test data: {str(e).splitlines()[0]}')
db_session.rollback()
diff --git a/tests/sample_data/conflict_event.sql b/tests/sample_data/conflict_event.sql
new file mode 100644
index 00000000..3c10f7d2
--- /dev/null
+++ b/tests/sample_data/conflict_event.sql
@@ -0,0 +1,3 @@
+INSERT INTO conflict_event (resource_hdx_id, admin2_ref, event_type, events, fatalities, reference_period_start, reference_period_end) VALUES
+('a8e69c6c-16fc-4983-92ee-e04e8960b51f', 4, 'POLITICAL_VIOLENCE', 10, 2, '2024-01-01', '2024-01-31'),
+('a8e69c6c-16fc-4983-92ee-e04e8960b51f', 1, 'CIVILIAN_TARGETING', 3, 0, '2024-01-01', '2024-01-31');
\ No newline at end of file
diff --git a/tests/sample_data/dataset_resource.sql b/tests/sample_data/dataset_resource.sql
new file mode 100644
index 00000000..92a47b1a
--- /dev/null
+++ b/tests/sample_data/dataset_resource.sql
@@ -0,0 +1,20 @@
+-- dummy data
+INSERT INTO dataset (hdx_id, hdx_stub, title, hdx_provider_stub, hdx_provider_name)
+VALUES
+('90deb235-1bf5-4bae-b231-3393222c2d01', 'dataset01', 'Dataset #1', 'provider01', 'Provider #1'),
+('b9e438e0-b68a-49f9-b9a9-68c0f3e93604', 'dataset02', 'Dataset #2', 'provider02', 'Provider #2'),
+('62ad6e55-5f5d-4494-854c-4110687e9e25', 'dataset03', 'Dataset #3', 'provider03', 'Provider #3');
+
+-- dummy data
+INSERT INTO resource (hdx_id, dataset_hdx_id, name, format, update_date, is_hxl, download_url, hapi_updated_date)
+VALUES
+('17acb541-9431-409a-80a8-50eda7e8ebab', '90deb235-1bf5-4bae-b231-3393222c2d01', 'resource-01.csv', 'csv', '2023-06-01 00:00:00',TRUE,
+'https://data.humdata.org/dataset/c3f001fa-b45b-464c-9460-1ca79fd39b40/resource/90deb235-1bf5-4bae-b231-3393222c2d01/download/resource-01.csv',
+ '2023-01-01 00:00:00'),
+('d1160fa9-1d58-4f96-9df5-edbff2e80895', 'b9e438e0-b68a-49f9-b9a9-68c0f3e93604','resource-02.xlsx', 'xlsx', '2023-07-01 00:00:00',TRUE,
+'https://fdw.fews.net/api/tradeflowquantityvaluefacts/?dataset=1845&country=TZ&fields=simple&format=xlsx',
+ '2023-01-01 00:00:00'),
+('a8e69c6c-16fc-4983-92ee-e04e8960b51f', '62ad6e55-5f5d-4494-854c-4110687e9e25', 'resource-03.csv', 'csv', '2023-08-01 00:00:00',TRUE,
+ 'https://data.humdata.org/dataset/7cf3cec8-dbbc-4c96-9762-1464cd0bff75/resource/62ad6e55-5f5d-4494-854c-4110687e9e25/download/resource-03.csv',
+ '2023-01-01 00:00:00');
+
diff --git a/tests/sample_data/funding.sql b/tests/sample_data/funding.sql
new file mode 100644
index 00000000..65da4cb3
--- /dev/null
+++ b/tests/sample_data/funding.sql
@@ -0,0 +1,4 @@
+-- Funding
+INSERT INTO funding (resource_hdx_id, appeal_code, appeal_name, appeal_type, location_ref, requirements_usd, funding_usd, funding_pct, reference_period_start, reference_period_end)
+VALUES
+('17acb541-9431-409a-80a8-50eda7e8ebab', 'HFOO24', 'Foolandia HRP 2024', 'HRP', 1, 100000.3, 50000.7, 50, '2023-01-01 00:00:00', NULL);
\ No newline at end of file
diff --git a/tests/sample_data/humanitarian_needs.sql b/tests/sample_data/humanitarian_needs.sql
new file mode 100644
index 00000000..19908c90
--- /dev/null
+++ b/tests/sample_data/humanitarian_needs.sql
@@ -0,0 +1,16 @@
+-- dummy data
+INSERT INTO public.humanitarian_needs
+(resource_hdx_id, admin2_ref, gender, age_range, min_age, max_age, sector_code, population_group, population_status, disabled_marker, population, reference_period_start, reference_period_end)
+VALUES ('17acb541-9431-409a-80a8-50eda7e8ebab',2, 'ALL', 'ALL',0,99,'EDU','REFUGEES','AFFECTED','YES',500000,'2023-01-01 00:00:00', '2025-01-01 00:00:00');
+INSERT INTO public.humanitarian_needs
+(resource_hdx_id, admin2_ref, gender, age_range, min_age, max_age, sector_code, population_group, population_status, disabled_marker, population, reference_period_start, reference_period_end)
+VALUES ('17acb541-9431-409a-80a8-50eda7e8ebab', 4, 'FEMALE', '0-17',0,17,'SHL','IDP','INNEED','NO',1500000,'2023-01-01 00:00:00', '2025-01-01 00:00:00');
+INSERT INTO public.humanitarian_needs
+(resource_hdx_id, admin2_ref, gender, age_range, min_age, max_age, sector_code, population_group, population_status, disabled_marker, population, reference_period_start, reference_period_end)
+VALUES ('17acb541-9431-409a-80a8-50eda7e8ebab', 6, 'MALE', '65+',65,100,'WSH','ALL','ALL','ALL',100000,'2023-01-01 00:00:00', '2025-01-01 00:00:00');
+INSERT INTO public.humanitarian_needs
+(resource_hdx_id, admin2_ref, gender, age_range, min_age, max_age, sector_code, population_group, population_status, disabled_marker, population, reference_period_start, reference_period_end)
+VALUES ('17acb541-9431-409a-80a8-50eda7e8ebab', 4, 'FEMALE', '18-64',18,64,'SHL','IDP','INNEED','NO',1500000,'2023-01-01 00:00:00', '2025-01-01 00:00:00');
+INSERT INTO public.humanitarian_needs
+(resource_hdx_id, admin2_ref, gender, age_range, min_age, max_age, sector_code, population_group, population_status, disabled_marker, population, reference_period_start, reference_period_end)
+VALUES ('17acb541-9431-409a-80a8-50eda7e8ebab', 6, 'MALE', '18-64',18,64,'WSH','ALL','ALL','ALL',100000,'2023-01-01 00:00:00', '2025-01-01 00:00:00');
diff --git a/tests/sample_data/location_admin.sql b/tests/sample_data/location_admin.sql
new file mode 100644
index 00000000..ff89988b
--- /dev/null
+++ b/tests/sample_data/location_admin.sql
@@ -0,0 +1,23 @@
+-- dummy data
+INSERT INTO location (id, code, name, reference_period_start, reference_period_end)
+VALUES
+(1, 'FOO', 'Foolandia', '2023-01-01 00:00:00', NULL);
+
+-- dummy data
+INSERT INTO admin1 (id, location_ref, code, name, is_unspecified, reference_period_start, reference_period_end)
+VALUES
+(1, 1, 'FOO-XXX', 'Unspecified', TRUE, '2023-01-01 00:00:00', NULL),
+(2, 1, 'FOO-001', 'Province 01', FALSE, '2023-01-01 00:00:00', NULL),
+(3, 1, 'FOO-002', 'Province 02', FALSE, '2023-01-01 00:00:00', NULL);
+
+-- dummy data
+-- note that we need an "Unspecified" for every Admin1, including the unspecified one
+INSERT INTO admin2 (id, admin1_ref, code, name, is_unspecified, reference_period_start, reference_period_end)
+VALUES
+(1, 1, 'FOO-XXX-XXX', 'Unspecified', TRUE, '2023-01-01 00:00:00', NULL),
+(2, 2, 'FOO-001-XXX', 'Unspecified', TRUE, '2023-01-01 00:00:00', NULL),
+(3, 3, 'FOO-002-XXX', 'Unspecified', TRUE, '2023-01-01 00:00:00', NULL),
+(4, 2, 'FOO-001-A', 'District A', FALSE, '2023-01-01 00:00:00', NULL),
+(5, 2, 'FOO-001-B', 'District B', FALSE, '2023-01-01 00:00:00', NULL),
+(6, 3, 'FOO-002-C', 'District C', FALSE, '2023-01-01 00:00:00', NULL),
+(7, 3, 'FOO-002-D', 'District D', FALSE, '2023-01-01 00:00:00', NULL);
\ No newline at end of file
diff --git a/tests/sample_data/national_risk.sql b/tests/sample_data/national_risk.sql
new file mode 100644
index 00000000..a7f62f3e
--- /dev/null
+++ b/tests/sample_data/national_risk.sql
@@ -0,0 +1,5 @@
+
+INSERT INTO public.national_risk (resource_hdx_id, location_ref, risk_class, global_rank, overall_risk, hazard_exposure_risk, vulnerability_risk, coping_capacity_risk, meta_missing_indicators_pct, meta_avg_recentness_years, reference_period_start, reference_period_end) VALUES
+('17acb541-9431-409a-80a8-50eda7e8ebab', 1, 'VERY_HIGH', 4, 8.7, 8.5, 7.1, 6.6, 11, 0.2571428571428571, '2024-01-02 00:00:00', '2024-12-31 23:59:59.999999'),
+('17acb541-9431-409a-80a8-50eda7e8ebab', 1, 'HIGH', 5, 6.9, 7.9, 6.4, 6.6, 22.2, 0.3918918918918919, '2024-01-03 00:00:00', '2024-12-31 23:59:59.999999'),
+('17acb541-9431-409a-80a8-50eda7e8ebab', 1, 'MEDIUM', 6, 7.2, 6.8, 5.9, 6.6, 33.3, 0.3918918918918919, '2024-01-04 00:00:00', '2024-12-31 23:59:59.999999');
diff --git a/tests/sample_data/operational_presence.sql b/tests/sample_data/operational_presence.sql
new file mode 100644
index 00000000..b645123f
--- /dev/null
+++ b/tests/sample_data/operational_presence.sql
@@ -0,0 +1,8 @@
+-- 3W
+INSERT INTO operational_presence (resource_hdx_id, org_acronym, org_name, sector_code, admin2_ref, reference_period_start, reference_period_end)
+VALUES
+('17acb541-9431-409a-80a8-50eda7e8ebab', 'ORG01', 'Organisation 1', 'SHL', 2, '2023-01-01 00:00:00', NULL),
+('17acb541-9431-409a-80a8-50eda7e8ebab', 'ORG02', 'Organisation 2', 'FSC', 4, '2023-01-01 00:00:00', NULL),
+('17acb541-9431-409a-80a8-50eda7e8ebab', 'ORG03', 'Organisation 3', 'WSH', 4, '2023-01-01 00:00:00', NULL),
+('17acb541-9431-409a-80a8-50eda7e8ebab', 'ORG03', 'Organisation 3', 'HEA', 6, '2023-01-01 00:00:00', NULL),
+('17acb541-9431-409a-80a8-50eda7e8ebab', 'ORG02', 'Organisation 2', 'WSH', 1, '2023-01-01 00:00:00', NULL);
diff --git a/tests/sample_data/org.sql b/tests/sample_data/org.sql
new file mode 100644
index 00000000..5da1a474
--- /dev/null
+++ b/tests/sample_data/org.sql
@@ -0,0 +1,5 @@
+-- dummy data
+INSERT INTO org (acronym, name, org_type_code) VALUES
+('ORG01', 'Organisation 1', '433'),
+('ORG02', 'Organisation 2', '437'),
+('ORG03', 'Organisation 3', '447');
\ No newline at end of file
diff --git a/tests/sample_data/org_type.sql b/tests/sample_data/org_type.sql
new file mode 100644
index 00000000..20f4d409
--- /dev/null
+++ b/tests/sample_data/org_type.sql
@@ -0,0 +1,16 @@
+-- dummy data
+INSERT INTO org_type (code, description) VALUES
+('431', 'Academic / Research'),
+('433', 'Donor'),
+('434', 'Embassy'),
+('435', 'Government'),
+('437', 'International NGO'),
+('438', 'International Organization'),
+('439', 'Media'),
+('440', 'Military'),
+('441', 'National NGO'),
+('443', 'Other'),
+('444', 'Private sector'),
+('445', 'Red Cross / Red Crescent'),
+('446', 'Religious'),
+('447', 'United Nations');
diff --git a/tests/sample_data/population.sql b/tests/sample_data/population.sql
new file mode 100644
index 00000000..b85b7b38
--- /dev/null
+++ b/tests/sample_data/population.sql
@@ -0,0 +1,22 @@
+-- dummy data
+
+INSERT INTO population (resource_hdx_id,admin2_ref,gender,age_range,min_age,max_age,population,reference_period_start,reference_period_end) VALUES
+ ('17acb541-9431-409a-80a8-50eda7e8ebab',1,'NONBINARY','10-14',10,14,1000000,'2023-01-01 00:00:00','2023-06-30 00:00:00'),
+  ('17acb541-9431-409a-80a8-50eda7e8ebab',1,'FEMALE','25-29',25,29,500001,'2023-01-01 00:00:00','2023-06-30 00:00:00'),
+ ('17acb541-9431-409a-80a8-50eda7e8ebab',1,'MALE','10-14',10,14,489999,'2023-01-01 00:00:00','2023-06-30 00:00:00'),
+ ('17acb541-9431-409a-80a8-50eda7e8ebab',1,'NONBINARY','25-29',25,29,9999,'2023-01-01 00:00:00','2023-06-30 00:00:00'),
+ ('17acb541-9431-409a-80a8-50eda7e8ebab',1,'FEMALE','0-4',0,4,300000,'2023-01-01 00:00:00','2023-06-30 00:00:00'),
+ ('17acb541-9431-409a-80a8-50eda7e8ebab',2,'NONBINARY','5-9',5,9,2000,'2023-01-01 00:00:00','2023-06-30 00:00:00'),
+ ('17acb541-9431-409a-80a8-50eda7e8ebab',4,'MALE','10-14',10,14,100000,'2023-01-01 00:00:00','2023-06-30 00:00:00');
+
+-- INSERT INTO population (id, resource_ref, admin2_ref, gender_code, age_range_code, population, reference_period_start, reference_period_end, source_data)
+-- VALUES
+-- (1, 1, 1, 'x', '10-14', 1000000, '2023-01-01 00:00:00', '2023-06-30 00:00:00', 'DATA,DATA,DATA'), -- total national
+-- (2, 1, 1, 'f', '25-29', 500001, '2023-01-01 00:00:00', '2023-06-30 00:00:00','DATA,DATA,DATA'), -- national (f), all ages
+-- (3, 1, 1, 'm', '10-14', 489999, '2023-01-01 00:00:00', '2023-06-30 00:00:00','DATA,DATA,DATA'), -- national (f), all ages
+-- (4, 1, 1, 'x', '25-29', 9999, '2023-01-01 00:00:00', '2023-06-30 00:00:00','DATA,DATA,DATA'), -- national (x), all ages
+-- (5, 1, 1, 'f', '0-4', 300000, '2023-01-01 00:00:00', '2023-06-30 00:00:00','DATA,DATA,DATA'), -- national (f) children
+-- (6, 1, 2, 'x', '5-9', 2000, '2023-01-01 00:00:00', '2023-06-30 00:00:00','DATA,DATA,DATA'), -- admin1 (x) adolescents
+-- (7, 1, 4, 'm', '10-14', 100000, '2023-01-01 00:00:00', '2023-06-30 00:00:00','DATA,DATA,DATA'); -- admin2 (m) elderly
+
+-- -- end
diff --git a/tests/sample_data/refugees.sql b/tests/sample_data/refugees.sql
new file mode 100644
index 00000000..ded9b728
--- /dev/null
+++ b/tests/sample_data/refugees.sql
@@ -0,0 +1,10 @@
+-- dummy data
+INSERT INTO refugees
+(resource_hdx_id, origin_location_ref, asylum_location_ref, population_group, gender, age_range, min_age, max_age, population, reference_period_start, reference_period_end)
+VALUES ('17acb541-9431-409a-80a8-50eda7e8ebab',1, 1, 'REFUGEES','FEMALE','12-17',12,17,100000,'2023-01-01 00:00:00', '2025-01-01 00:00:00'),
+('17acb541-9431-409a-80a8-50eda7e8ebab',1, 1, 'REFUGEES','MALE','12-17',12,17,100000,'2023-01-01 00:00:00', '2025-01-01 00:00:00'),
+('17acb541-9431-409a-80a8-50eda7e8ebab',1, 1, 'REFUGEES','FEMALE','18-59',18,59,100000,'2023-01-01 00:00:00', '2025-01-01 00:00:00'),
+('17acb541-9431-409a-80a8-50eda7e8ebab',1, 1, 'REFUGEES','MALE','18-59',18,59,100000,'2023-01-01 00:00:00', '2025-01-01 00:00:00'),
+('17acb541-9431-409a-80a8-50eda7e8ebab',1, 1, 'REFUGEES','FEMALE','60+',60,100,100000,'2023-01-01 00:00:00', '2025-01-01 00:00:00'),
+('17acb541-9431-409a-80a8-50eda7e8ebab',1, 1, 'REFUGEES','MALE','60+',60,100,100000,'2023-01-01 00:00:00', '2025-01-01 00:00:00'),
+('17acb541-9431-409a-80a8-50eda7e8ebab',1, 1, 'REFUGEES','ALL','ALL',0,99,100000,'2023-01-01 00:00:00', '2025-01-01 00:00:00');
diff --git a/tests/sample_data/sector.sql b/tests/sample_data/sector.sql
new file mode 100644
index 00000000..2e2602e5
--- /dev/null
+++ b/tests/sample_data/sector.sql
@@ -0,0 +1,18 @@
+-- dummy data
+INSERT INTO sector (code, name)
+VALUES
+('SHL', 'Emergency Shelter and NFI'),
+('CCM', 'Camp Coordination / Management'),
+('PRO-MIN', 'Mine Action'),
+('FSC', 'Food Security'),
+('WSH', 'Water Sanitation Hygiene'),
+('LOG', 'Logistics'),
+('PRO-CPN', 'Child Protection'),
+('PRO', 'Protection'),
+('EDU', 'Education'),
+('NUT', 'Nutrition'),
+('HEA', 'Health'),
+('ERY', 'Early Recovery'),
+('TEL', 'Emergency Telecommunications'),
+('PRO-GBV', 'Gender Based Violence'),
+('PRO-HLP', 'Housing, Land and Property');
\ No newline at end of file
diff --git a/tests/test_analytics/test_api_call_tracking.py b/tests/test_analytics/test_api_call_tracking.py
index 8c642975..dddb7e21 100644
--- a/tests/test_analytics/test_api_call_tracking.py
+++ b/tests/test_analytics/test_api_call_tracking.py
@@ -13,6 +13,8 @@
log = logging.getLogger(__name__)
+ENDPOINT = '/api/v1/coordination-context/operational-presence'
+
@pytest.mark.asyncio
async def test_tracking_endpoint_success():
@@ -26,13 +28,13 @@ async def test_tracking_endpoint_success():
'HTTP_X_REAL_IP': '127.0.0.1',
}
params = {'admin_level': '1', 'output_format': 'json'}
- response = await ac.get('/api/v1/themes/3w', params=params, headers=headers)
+ response = await ac.get(ENDPOINT, params=params, headers=headers)
assert response.status_code == 200
assert send_mixpanel_event_patch.call_count == 1, 'API calls should be tracked'
expected_mixpanel_dict = {
- 'endpoint name': '/api/v1/themes/3w',
+ 'endpoint name': ENDPOINT,
'query params': ['admin_level', 'output_format'],
'time': pytest.approx(time.time()),
'app name': None,
@@ -45,13 +47,11 @@ async def test_tracking_endpoint_success():
'$os': 'Windows',
'$browser': 'Chrome',
'$browser_version': '124',
- '$current_url': f'{TEST_BASE_URL}/api/v1/themes/3w?admin_level=1&output_format=json',
+ '$current_url': f'{TEST_BASE_URL}{ENDPOINT}?admin_level=1&output_format=json',
}
- (
- send_mixpanel_event_patch.assert_called_once_with('api call', '123456', expected_mixpanel_dict),
- 'Parameters do not match the expected ones',
- )
+ # Check parameters match the expected ones
+ send_mixpanel_event_patch.assert_called_once_with('api call', '123456', expected_mixpanel_dict)
@pytest.mark.asyncio
diff --git a/tests/test_endpoints/endpoint_data.py b/tests/test_endpoints/endpoint_data.py
index ba31ba34..c2c98450 100644
--- a/tests/test_endpoints/endpoint_data.py
+++ b/tests/test_endpoints/endpoint_data.py
@@ -1,23 +1,27 @@
from datetime import date
+from hapi_schema.utils.enums import RiskClass
+
endpoint_data = {
- '/api/admin1': {
+ '/api/v1/metadata/admin1': {
'query_parameters': {
'code': 'FoO-001',
'name': 'Province 01',
'location_code': 'FoO',
'location_name': 'Foolandia',
+ 'reference_period_start_min': '2020-01-01T00:00:00',
+ 'reference_period_start_max': '2024-01-01T00:00:00',
},
'expected_fields': [
'code',
'name',
'location_code',
'location_name',
- 'hapi_updated_date',
- 'hapi_replaced_date',
+ 'reference_period_start',
+ 'reference_period_end',
],
},
- '/api/admin2': {
+ '/api/v1/metadata/admin2': {
'query_parameters': {
'code': 'FoO-001-A',
'name': 'District A',
@@ -25,6 +29,8 @@
'admin1_name': 'Province 01',
'location_code': 'FOo',
'location_name': 'Foolandia',
+ 'reference_period_start_min': '2020-01-01T00:00:00',
+ 'reference_period_start_max': '2024-01-01T00:00:00',
},
'expected_fields': [
'code',
@@ -33,19 +39,15 @@
'admin1_name',
'location_code',
'location_name',
- 'hapi_updated_date',
- 'hapi_replaced_date',
+ 'reference_period_start',
+ 'reference_period_end',
],
},
- '/api/age_range': {
- 'query_parameters': {'code': '10-14'},
- 'expected_fields': ['code', 'age_min', 'age_max'],
- },
- '/api/dataset': {
+ '/api/v1/metadata/dataset': {
'query_parameters': {
- 'hdx_id': 'c3f001fa-b45b-464c-9460-1ca79fd39b40',
+ 'hdx_id': '90deb235-1bf5-4bae-b231-3393222c2d01',
'title': 'Dataset #1',
- 'hdx_provider_stub': 'Provider01',
+ 'hdx_provider_stub': 'pRoViDeR01',
'hdx_provider_name': 'Provider #1',
},
'expected_fields': [
@@ -56,55 +58,124 @@
'hdx_provider_name',
'hdx_link', # computed field
'hdx_api_link', # computed field
+ 'provider_hdx_link', # computed field
+ 'provider_hdx_api_link', # computed field
],
},
- '/api/gender': {
- 'query_parameters': {'code': 'F', 'name': 'female'},
- 'expected_fields': ['code', 'description'],
+ '/api/v1/metadata/location': {
+ 'query_parameters': {
+ 'code': 'foo',
+ 'name': 'Foolandia',
+ 'reference_period_start_min': '2020-01-01T00:00:00',
+ 'reference_period_start_max': '2024-01-01T00:00:00',
+ },
+ 'expected_fields': [
+ 'code',
+ 'name',
+ 'reference_period_start',
+ 'reference_period_end',
+ ],
},
- '/api/location': {
- 'query_parameters': {'code': 'foo', 'name': 'Foolandia'},
- 'expected_fields': ['code', 'name'],
+ '/api/v1/coordination-context/conflict-event': {
+ 'query_parameters': {
+ 'event_type': 'political_violence',
+ 'location_ref': 1,
+ 'location_code': 'foo',
+ 'location_name': 'Foolandia',
+ 'admin1_ref': 2,
+ 'admin1_code': 'foo-001',
+ 'admin1_name': 'province',
+ 'admin2_ref': 4,
+ 'admin2_code': 'foo-001-a',
+ 'admin2_name': 'district',
+ 'reference_period_start_min': '2024-01-01T00:00:00',
+ 'reference_period_start_max': '2024-01-02T00:00:00',
+ 'reference_period_end_min': '2024-01-30T00:00:00',
+ 'reference_period_end_max': '2024-02-01T00:00:00',
+ },
+ 'expected_fields': [
+ 'event_type',
+ 'events',
+ 'fatalities',
+ 'resource_hdx_id',
+ 'location_ref',
+ 'location_code',
+ 'location_name',
+ 'admin1_ref',
+ 'admin1_code',
+ 'admin1_name',
+ 'admin2_ref',
+ 'admin2_code',
+ 'admin2_name',
+ 'reference_period_start',
+ 'reference_period_end',
+ ],
},
- '/api/themes/3W': {
+ '/api/v1/coordination-context/funding': {
+ 'query_parameters': {
+ 'appeal_code': 'hfoo24',
+ 'appeal_type': 'hRp',
+ 'location_code': 'foo',
+ 'location_name': 'Foolandia',
+ 'reference_period_start_min': '2020-01-01T00:00:00',
+ 'reference_period_start_max': '2024-01-01T00:00:00',
+ },
+ 'expected_fields': [
+ 'resource_hdx_id',
+ 'appeal_code',
+ 'appeal_name',
+ 'appeal_type',
+ 'requirements_usd',
+ 'funding_usd',
+ 'funding_pct',
+ 'location_ref',
+ 'location_code',
+ 'location_name',
+ 'reference_period_start',
+ 'reference_period_end',
+ ],
+ },
+ '/api/v1/coordination-context/operational-presence': {
'query_parameters': {
- 'sector_code': 'Shl',
- 'dataset_hdx_provider_stub': 'PROVIDER01',
- 'resource_update_date_min': date(2023, 6, 1),
- 'resource_update_date_max': date(2023, 6, 2),
'org_acronym': 'oRG01',
'org_name': 'Organisation 1',
+ 'sector_code': 'Shl',
'sector_name': 'Emergency Shelter and NFI',
+ 'location_ref': 1,
'location_code': 'foo',
'location_name': 'Foolandia',
+ 'admin1_ref': 2,
'admin1_code': 'foo-001',
+ 'admin1_name': 'province',
'admin1_is_unspecified': False,
+ 'admin2_ref': 2,
'admin2_code': 'foo-001-xxx',
'admin2_name': 'Unspecified',
'admin2_is_unspecified': True,
+ 'reference_period_start_min': '2020-01-01T00:00:00',
+ 'reference_period_start_max': '2024-01-01T00:00:00',
},
'expected_fields': [
'sector_code',
- 'dataset_hdx_stub',
'resource_hdx_id',
- 'hapi_updated_date',
- 'hapi_replaced_date',
'org_acronym',
'org_name',
+ 'org_type_code',
'sector_name',
+ 'location_ref',
'location_code',
'location_name',
'reference_period_start',
'reference_period_end',
+ 'admin1_ref',
'admin1_code',
'admin1_name',
- 'location_ref',
+ 'admin2_ref',
'admin2_code',
'admin2_name',
- 'admin1_ref',
],
},
- '/api/org': {
+ '/api/v1/metadata/org': {
'query_parameters': {
'acronym': 'Org01',
'name': 'Organisation 1',
@@ -113,60 +184,51 @@
},
'expected_fields': ['acronym', 'name', 'org_type_code', 'org_type_description'],
},
- '/api/org_type': {
+ '/api/v1/metadata/org_type': {
'query_parameters': {
'code': '431',
- 'name': 'national', # International
+ 'name': 'Academic / Research',
},
'expected_fields': ['code', 'description'],
},
- '/api/themes/population': {
+ '/api/v1/population-social/population': {
'query_parameters': {
- 'gender_code': 'X',
- 'age_range_code': '10-14',
+ 'admin2_ref': 1,
+ 'gender': 'x',
+ 'age_range': '10-14',
+ 'min_age': 10,
+ 'max_age': 14,
'population': 1000000,
- 'dataset_hdx_provider_stub': 'PROvider01',
- 'resource_update_date_min': date(2023, 6, 1),
- 'resource_update_date_max': date(2023, 6, 2),
+ 'reference_period_start': date(2023, 6, 1),
+ 'reference_period_end': date(2023, 6, 2),
'location_code': 'fOO',
'location_name': 'Foolandia',
'admin1_code': 'FOO-xxx',
- 'admin1_is_unspecified': True,
+ 'admin1_is_unspecified': False,
'admin2_code': 'FOO-xxx-XXX',
'admin2_name': 'Unspecified',
'admin2_is_unspecified': True,
},
'expected_fields': [
- 'gender_code',
- 'age_range_code',
+ 'resource_hdx_id',
+ 'admin2_ref',
+ 'gender',
+ 'age_range',
+ 'min_age',
+ 'max_age',
'population',
'reference_period_start',
'reference_period_end',
- 'dataset_hdx_stub',
- 'resource_hdx_id',
- 'hapi_updated_date',
- 'hapi_replaced_date',
+ 'location_ref',
'location_code',
'location_name',
+ 'admin1_ref',
'admin1_code',
'admin1_name',
- 'location_ref',
'admin2_code',
'admin2_name',
- 'admin1_ref',
],
},
- '/api/population_group': {
- 'query_parameters': {
- 'code': 'refugees',
- 'description': 'refugee', # refugees
- },
- 'expected_fields': ['code', 'description'],
- },
- '/api/population_status': {
- 'query_parameters': {'code': 'inneed', 'description': 'people'},
- 'expected_fields': ['code', 'description'],
- },
'/api/themes/food_security': {
'query_parameters': {
'ipc_phase_code': '1',
@@ -205,18 +267,23 @@
'admin1_ref',
],
},
- '/api/themes/national_risk': {
+ '/api/v1/coordination-context/national-risk': {
'query_parameters': {
- 'risk_class': 5,
- 'global_rank': 4,
- 'overall_risk': 8.1,
- 'hazard_exposure_risk': 8.7,
- 'vulnerability_risk': 8.5,
- 'coping_capacity_risk': 7.1,
- 'dataset_hdx_provider_stub': 'pRoViDeR01',
- 'resource_update_date_min': date(2023, 6, 1),
- 'resource_update_date_max': date(2023, 6, 2),
- # 'sector_name': 'Emergency Shelter and NFI',
+ 'risk_class': RiskClass.HIGH.value,
+ 'global_rank_min': 5,
+ 'global_rank_max': 7,
+ 'overall_risk_min': 6,
+ 'overall_risk_max': 10,
+ 'hazard_exposure_risk_min': 6,
+ 'hazard_exposure_risk_max': 10,
+ 'vulnerability_risk_min': 5,
+ 'vulnerability_risk_max': 10,
+ 'coping_capacity_risk_min': 6.1,
+ 'coping_capacity_risk_max': 10.1,
+ 'reference_period_start_min': '2020-01-01T00:00:00',
+ 'reference_period_start_max': '2024-01-11T00:00:00',
+ 'reference_period_end_min': '2023-01-01T00:00:00',
+ 'reference_period_end_max': '2025-01-01T00:00:00',
'location_code': 'fOO',
'location_name': 'Foolandia',
},
@@ -231,85 +298,114 @@
'meta_avg_recentness_years',
'reference_period_start',
'reference_period_end',
- 'dataset_hdx_stub',
- 'dataset_hdx_provider_stub',
'resource_hdx_id',
- 'hapi_updated_date',
- 'hapi_replaced_date',
- # "sector_name",
'location_code',
'location_name',
],
},
- '/api/themes/humanitarian_needs': {
+ '/api/v1/affected-people/humanitarian-needs': {
'query_parameters': {
- 'gender_code': 'm',
- 'age_range_code': '0-4',
- 'disabled_marker': False,
+ 'admin2_ref': 2,
+ 'gender': '*',
+ 'age_range': 'ALL',
+ 'min_age': 0,
+ 'max_age': 99,
+ 'disabled_marker': 'y',
'sector_code': 'EDU',
+ 'population_group': 'REF',
+ 'population_status': 'AFF',
+ 'reference_period_start_min': '2020-01-01T00:00:00',
+ 'reference_period_start_max': '2026-01-01T00:00:00',
'sector_name': 'Education',
- 'population_group_code': 'refugees',
- 'population_status_code': 'inneed',
- 'population': 100,
- 'dataset_hdx_provider_stub': 'PROvider01',
- 'resource_update_date_min': date(2023, 6, 1),
- 'resource_update_date_max': date(2023, 6, 2),
- 'location_code': 'fOO',
+ 'location_code': 'foo',
'location_name': 'Foolandia',
- 'admin1_code': 'FOO-xxx',
- 'admin1_is_unspecified': True,
- 'admin2_code': 'FOO-xxx-XXX',
+ 'location_ref': 1,
+ 'admin1_code': 'FOO-001',
+ 'admin1_name': 'Province 01',
+ 'admin2_code': 'foo-001-XXX',
'admin2_name': 'Unspecified',
- 'admin2_is_unspecified': True,
+ 'admin1_ref': 2,
},
'expected_fields': [
- 'gender_code',
- 'age_range_code',
+ 'resource_hdx_id',
+ 'admin2_ref',
+ 'gender',
+ 'age_range',
+ 'min_age',
+ 'max_age',
'disabled_marker',
'sector_code',
- 'population_group_code',
- 'population_status_code',
+ 'population_group',
+ 'population_status',
'population',
'reference_period_start',
'reference_period_end',
- 'dataset_hdx_stub',
- 'dataset_hdx_provider_stub',
- 'resource_hdx_id',
- 'hapi_updated_date',
- 'hapi_replaced_date',
'sector_name',
'location_code',
'location_name',
+ 'location_ref',
'admin1_code',
'admin1_name',
- 'location_ref',
'admin2_code',
'admin2_name',
'admin1_ref',
],
},
- '/api/resource': {
+ '/api/v1/affected-people/refugees': {
'query_parameters': {
- 'hdx_id': '90deb235-1bf5-4bae-b231-3393222c2d01',
+ 'population_group': 'REF',
+ 'gender': '*',
+ 'age_range': 'ALL',
+ 'min_age': 0,
+ 'max_age': 99,
+ 'reference_period_start_min': '2020-01-01T00:00:00',
+ 'reference_period_start_max': '2026-01-01T00:00:00',
+ 'origin_location_code': 'foo',
+ 'origin_location_name': 'Foolandia',
+ 'asylum_location_code': 'foo',
+ 'asylum_location_name': 'Foolandia',
+ },
+ 'expected_fields': [
+ 'resource_hdx_id',
+ 'origin_location_ref',
+ 'asylum_location_ref',
+ 'population_group',
+ 'gender',
+ 'age_range',
+ 'min_age',
+ 'max_age',
+ 'population',
+ 'reference_period_start',
+ 'reference_period_end',
+ 'origin_location_code',
+ 'origin_location_name',
+ 'asylum_location_code',
+ 'asylum_location_name',
+ ],
+ },
+ '/api/v1/metadata/resource': {
+ 'query_parameters': {
+ 'hdx_id': '17acb541-9431-409a-80a8-50eda7e8ebab',
+ 'name': 'resource-01.csv',
'format': 'csv',
'update_date_min': date(2023, 6, 1),
- 'update_date_max': date(2023, 6, 2),
+ 'update_date_max': date(2023, 6, 3),
'is_hxl': True,
- 'dataset_hdx_id': 'c3f001fa-b45b-464c-9460-1ca79fd39b40',
+ 'hapi_updated_date': date(2023, 6, 2),
+ 'dataset_hdx_stub': 'dataset01',
'dataset_title': 'Dataset #1',
'dataset_hdx_provider_stub': 'pRoViDeR01',
'dataset_hdx_provider_name': 'Provider #1',
},
'expected_fields': [
'hdx_id',
+ 'dataset_hdx_id',
'name',
'format',
'update_date',
'is_hxl',
- 'hapi_updated_date',
- 'hapi_replaced_date',
'download_url',
- 'dataset_hdx_id',
+ 'hapi_updated_date',
'dataset_hdx_stub',
'dataset_title',
'dataset_hdx_provider_stub',
@@ -318,20 +414,22 @@
'hdx_api_link', # computed field
'dataset_hdx_link', # computed field
'dataset_hdx_api_link', # computed field
+ 'provider_hdx_link', # computed field
+ 'provider_hdx_api_link', # computed field
],
},
- '/api/sector': {
+ '/api/v1/metadata/sector': {
'query_parameters': {
'code': 'Pro',
- 'name': 'Protect', # Protection
+            'name': 'Protection',
},
'expected_fields': ['code', 'name'],
},
- '/api/encode_identifier': {
+ '/api/encode_app_identifier': {
'query_parameters': {
'application': 'web_application_1',
'email': 'info@example.com',
},
- 'expected_fields': ['encoded_identifier'],
+ 'expected_fields': ['encoded_app_identifier'],
},
}
diff --git a/tests/test_endpoints/test_admin1_endpoint.py b/tests/test_endpoints/test_admin1_endpoint.py
index e9c97f9d..d013e193 100644
--- a/tests/test_endpoints/test_admin1_endpoint.py
+++ b/tests/test_endpoints/test_admin1_endpoint.py
@@ -7,7 +7,7 @@
log = logging.getLogger(__name__)
-ENDPOINT_ROUTER = '/api/admin1'
+ENDPOINT_ROUTER = '/api/v1/metadata/admin1'
endpoint_data = endpoint_data[ENDPOINT_ROUTER]
query_parameters = endpoint_data['query_parameters']
expected_fields = endpoint_data['expected_fields']
diff --git a/tests/test_endpoints/test_admin2_endpoint.py b/tests/test_endpoints/test_admin2_endpoint.py
index fff118ac..d83db068 100644
--- a/tests/test_endpoints/test_admin2_endpoint.py
+++ b/tests/test_endpoints/test_admin2_endpoint.py
@@ -7,7 +7,7 @@
log = logging.getLogger(__name__)
-ENDPOINT_ROUTER = '/api/admin2'
+ENDPOINT_ROUTER = '/api/v1/metadata/admin2'
endpoint_data = endpoint_data[ENDPOINT_ROUTER]
query_parameters = endpoint_data['query_parameters']
expected_fields = endpoint_data['expected_fields']
diff --git a/tests/test_endpoints/test_conflict_event_endpoint.py b/tests/test_endpoints/test_conflict_event_endpoint.py
new file mode 100644
index 00000000..4e0103a0
--- /dev/null
+++ b/tests/test_endpoints/test_conflict_event_endpoint.py
@@ -0,0 +1,173 @@
+from datetime import datetime
+from hapi_schema.utils.enums import EventType
+import pytest
+import logging
+
+from httpx import AsyncClient
+from hdx_hapi.endpoints.models.conflict_event import ConflictEventResponse
+from main import app
+from tests.test_endpoints.endpoint_data import endpoint_data
+
+log = logging.getLogger(__name__)
+
+ENDPOINT_ROUTER = '/api/v1/coordination-context/conflict-event'
+endpoint_data = endpoint_data[ENDPOINT_ROUTER]
+query_parameters = endpoint_data['query_parameters']
+expected_fields = endpoint_data['expected_fields']
+
+
+@pytest.mark.asyncio
+async def test_get_conflict_events(event_loop, refresh_db):
+ log.info('started test_get_conflict_events')
+ async with AsyncClient(app=app, base_url='http://test') as ac:
+ response = await ac.get(ENDPOINT_ROUTER)
+ assert response.status_code == 200
+        assert len(response.json()['data']) > 0, 'There should be at least one conflict event in the database'
+
+
+@pytest.mark.asyncio
+async def test_get_conflict_event_params(event_loop, refresh_db):
+ log.info('started test_get_conflict_event_params')
+
+ for param_name, param_value in query_parameters.items():
+ async with AsyncClient(app=app, base_url='http://test', params={param_name: param_value}) as ac:
+ response = await ac.get(ENDPOINT_ROUTER)
+
+ assert response.status_code == 200
+ assert len(response.json()['data']) > 0, (
+ 'There should be at least one conflict_event entry for parameter '
+ f'"{param_name}" with value "{param_value}" in the database'
+ )
+
+ async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac:
+ response = await ac.get(ENDPOINT_ROUTER)
+
+ assert response.status_code == 200
+ assert (
+ len(response.json()['data']) > 0
+ ), 'There should be at least one conflict_event entry for all parameters in the database'
+
+
+@pytest.mark.asyncio
+async def test_get_conflict_event_result(event_loop, refresh_db):
+ log.info('started test_get_conflict_event_result')
+
+ async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac:
+ response = await ac.get(ENDPOINT_ROUTER)
+
+ for field in expected_fields:
+ assert field in response.json()['data'][0], f'Field "{field}" not found in the response'
+
+ for field in response.json()['data'][0]:
+ assert field in expected_fields, f'Field "{field}" unexpected'
+
+ assert len(response.json()['data'][0]) == len(
+ expected_fields
+ ), 'Response has a different number of fields than expected'
+
+
+@pytest.mark.asyncio
+async def test_get_conflict_event_adm_fields(event_loop, refresh_db):
+ log.info('started test_get_conflict_event_adm_fields')
+
+ conflict_event_view_adm_specified = ConflictEventResponse(
+ resource_hdx_id='test-resource1',
+ event_type=EventType.CIVILIAN_TARGETING,
+ events=10,
+ fatalities=2,
+ location_ref=1,
+        location_code='FOO-XXX',
+        location_name='Foolandia',
+ admin1_ref=1,
+ admin1_is_unspecified=False,
+ admin1_code='FOO-XXX',
+ admin1_name='Province 01',
+ admin2_ref=1,
+ admin2_is_unspecified=False,
+ admin2_code='FOO-XXX-XXX',
+ admin2_name='District A',
+ reference_period_start=datetime.strptime('2023-01-01 00:00:00', '%Y-%m-%d %H:%M:%S'),
+ reference_period_end=datetime.strptime('2023-03-31 23:59:59', '%Y-%m-%d %H:%M:%S'),
+ )
+
+ assert (
+ conflict_event_view_adm_specified.admin1_code == 'FOO-XXX'
+ ), 'admin1_code should keep its value when admin1_is_unspecified is False'
+ assert (
+ conflict_event_view_adm_specified.admin1_name == 'Province 01'
+ ), 'admin1_name should keep its value when admin1_is_unspecified is False'
+ assert (
+ conflict_event_view_adm_specified.admin2_code == 'FOO-XXX-XXX'
+ ), 'admin2_code should keep its value when admin1_is_unspecified is False'
+ assert (
+ conflict_event_view_adm_specified.admin2_name == 'District A'
+ ), 'admin2_name should keep its value when admin1_is_unspecified is False'
+
+ conflict_event_view_adm_unspecified = ConflictEventResponse(
+ resource_hdx_id='test-resource1',
+ event_type=EventType.CIVILIAN_TARGETING,
+ events=10,
+ fatalities=2,
+ location_ref=1,
+        location_code='FOO-XXX',
+        location_name='Foolandia',
+ admin1_is_unspecified=True,
+ admin1_ref=1,
+ admin1_code='FOO-XXX',
+        admin1_name='Unspecified',
+ admin2_ref=1,
+ admin2_is_unspecified=True,
+ admin2_code='FOO-XXX-XXX',
+ admin2_name='Unspecified',
+ reference_period_start=datetime.strptime('2023-01-01 00:00:00', '%Y-%m-%d %H:%M:%S'),
+ reference_period_end=datetime.strptime('2023-03-31 23:59:59', '%Y-%m-%d %H:%M:%S'),
+ )
+
+ assert (
+ conflict_event_view_adm_unspecified.admin1_code is None
+ ), 'admin1_code should be changed to None when admin1_is_unspecified is True'
+ assert (
+ conflict_event_view_adm_unspecified.admin1_name is None
+ ), 'admin1_name should be changed to None when admin1_is_unspecified is True'
+ assert (
+ conflict_event_view_adm_unspecified.admin2_code is None
+ ), 'admin2_code should be changed to None when admin1_is_unspecified is True'
+ assert (
+ conflict_event_view_adm_unspecified.admin2_name is None
+ ), 'admin2_name should be changed to None when admin1_is_unspecified is True'
+
+
+@pytest.mark.asyncio
+async def test_get_conflict_event_admin_level(event_loop, refresh_db):
+ log.info('started test_get_conflict_event_admin_level')
+
+ async with AsyncClient(
+ app=app,
+ base_url='http://test',
+ ) as ac:
+ response = await ac.get(ENDPOINT_ROUTER)
+
+ assert len(response.json()['data'][0]) == len(
+ expected_fields
+ ), 'Response has a different number of fields than expected'
+
+ response_items = response.json()['data']
+ admin_0_count = len(
+ [item for item in response_items if item['admin1_name'] is None and item['admin2_name'] is None]
+ )
+ admin_1_count = len(
+ [item for item in response_items if item['admin1_name'] is not None and item['admin2_name'] is None]
+ )
+ admin_2_count = len(
+ [item for item in response_items if item['admin1_name'] is not None and item['admin2_name'] is not None]
+ )
+ counts_map = {
+ '0': admin_0_count,
+ '1': admin_1_count,
+ '2': admin_2_count,
+ }
+
+ for admin_level, count in counts_map.items():
+ async with AsyncClient(app=app, base_url='http://test', params={'admin_level': admin_level}) as ac:
+ response = await ac.get(ENDPOINT_ROUTER)
+ assert len(response.json()['data']) == count, f'Admin level {admin_level} should return {count} entries'
diff --git a/tests/test_endpoints/test_dataset_endpoint.py b/tests/test_endpoints/test_dataset_endpoint.py
index 9ec88ddc..17246d7a 100644
--- a/tests/test_endpoints/test_dataset_endpoint.py
+++ b/tests/test_endpoints/test_dataset_endpoint.py
@@ -7,7 +7,7 @@
log = logging.getLogger(__name__)
-ENDPOINT_ROUTER = '/api/dataset'
+ENDPOINT_ROUTER = '/api/v1/metadata/dataset'
endpoint_data = endpoint_data[ENDPOINT_ROUTER]
query_parameters = endpoint_data['query_parameters']
expected_fields = endpoint_data['expected_fields']
diff --git a/tests/test_endpoints/test_encode_identifier.py b/tests/test_endpoints/test_encode_identifier.py
index dd850716..69bb81ff 100644
--- a/tests/test_endpoints/test_encode_identifier.py
+++ b/tests/test_endpoints/test_encode_identifier.py
@@ -9,7 +9,7 @@
log = logging.getLogger(__name__)
-ENDPOINT_ROUTER = '/api/encode_identifier'
+ENDPOINT_ROUTER = '/api/encode_app_identifier'
endpoint_data = endpoint_data[ENDPOINT_ROUTER]
query_parameters = endpoint_data['query_parameters']
expected_fields = endpoint_data['expected_fields']
@@ -43,9 +43,10 @@ async def test_get_encoded_identifier_results(event_loop, refresh_db):
assert field in response.json(), f'Field "{field}" not found in the response'
assert len(response.json()) == len(expected_fields), 'Response has a different number of fields than expected'
- assert response.json() == {'encoded_identifier': 'd2ViX2FwcGxpY2F0aW9uXzE6aW5mb0BleGFtcGxlLmNvbQ=='}
+ assert response.json() == {'encoded_app_identifier': 'd2ViX2FwcGxpY2F0aW9uXzE6aW5mb0BleGFtcGxlLmNvbQ=='}
assert (
- base64.b64decode(response.json()['encoded_identifier']).decode('utf-8') == 'web_application_1:info@example.com'
+ base64.b64decode(response.json()['encoded_app_identifier']).decode('utf-8')
+ == 'web_application_1:info@example.com'
)
diff --git a/tests/test_endpoints/test_endpoints_vs_encode_identifier.py b/tests/test_endpoints/test_endpoints_vs_encode_identifier.py
index 3a337be4..2f9c33b7 100644
--- a/tests/test_endpoints/test_endpoints_vs_encode_identifier.py
+++ b/tests/test_endpoints/test_endpoints_vs_encode_identifier.py
@@ -9,21 +9,23 @@
log = logging.getLogger(__name__)
ENDPOINT_ROUTER_LIST = [
- '/api/v1/admin1',
- '/api/v1/admin2',
- '/api/v1/dataset',
- '/api/v1/themes/food_security',
- '/api/v1/themes/humanitarian_needs',
- '/api/v1/location',
- '/api/v1/themes/national_risk',
- '/api/v1/themes/3W',
- '/api/v1/org',
- '/api/v1/org_type',
- '/api/v1/themes/population',
- '/api/v1/population_group',
- '/api/v1/population_status',
- '/api/v1/resource',
- '/api/v1/sector',
+ '/api/v1/metadata/admin1',
+ '/api/v1/metadata/admin2',
+ '/api/v1/metadata/dataset',
+ '/api/v1/affected-people/humanitarian-needs',
+ '/api/v1/metadata/location',
+ '/api/v1/metadata/org',
+ '/api/v1/metadata/org_type',
+ '/api/v1/metadata/resource',
+ '/api/v1/metadata/sector',
+ '/api/v1/population-social/population',
+ '/api/v1/coordination-context/national-risk',
+ '/api/v1/coordination-context/operational-presence',
+ '/api/v1/affected-people/refugees',
+ '/api/v1/coordination-context/funding',
+ '/api/v1/coordination-context/conflict-event',
+ # TODO: fix the following endpoints
+ '/api/v1/food/food-security',
]
@@ -42,7 +44,7 @@ async def test_endpoints_vs_encode_identifier(event_loop, refresh_db, enable_hap
async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac:
response = await ac.get(endpoint_router)
- assert response.status_code == 200
+ assert response.status_code == 400
response_items = response.json()
assert len(response_items) > 0
@@ -50,7 +52,7 @@ async def test_endpoints_vs_encode_identifier(event_loop, refresh_db, enable_hap
@pytest.mark.asyncio
async def test_encode_identifier(event_loop, refresh_db, enable_hapi_identifier_filtering):
# testing the encode identifier endpoint
- endpoint_router = '/api/v1/encode_identifier'
+ endpoint_router = '/api/v1/encode_app_identifier'
# it should not be important if app_identifier is passed or not to the endpoint
async with AsyncClient(app=app, base_url='http://test') as ac:
diff --git a/tests/test_endpoints/test_gender_endpoint.py b/tests/test_endpoints/test_funding_endpoint.py
similarity index 71%
rename from tests/test_endpoints/test_gender_endpoint.py
rename to tests/test_endpoints/test_funding_endpoint.py
index cb33c4e7..a08684c8 100644
--- a/tests/test_endpoints/test_gender_endpoint.py
+++ b/tests/test_endpoints/test_funding_endpoint.py
@@ -1,29 +1,30 @@
import pytest
import logging
+
from httpx import AsyncClient
from main import app
from tests.test_endpoints.endpoint_data import endpoint_data
log = logging.getLogger(__name__)
-ENDPOINT_ROUTER = '/api/gender'
+ENDPOINT_ROUTER = '/api/v1/coordination-context/funding'
endpoint_data = endpoint_data[ENDPOINT_ROUTER]
query_parameters = endpoint_data['query_parameters']
expected_fields = endpoint_data['expected_fields']
@pytest.mark.asyncio
-async def test_get_genders(event_loop, refresh_db):
- log.info('started test_get_genders')
+async def test_get_fundings(event_loop, refresh_db):
+ log.info('started test_get_fundings')
async with AsyncClient(app=app, base_url='http://test') as ac:
response = await ac.get(ENDPOINT_ROUTER)
assert response.status_code == 200
- assert len(response.json()['data']) > 0, 'There should be at least one gender in the database'
+ assert len(response.json()['data']) > 0, 'There should be at least one funding in the database'
@pytest.mark.asyncio
-async def test_get_gender_params(event_loop, refresh_db):
- log.info('started test_get_gender_params')
+async def test_get_funding_params(event_loop, refresh_db):
+ log.info('started test_get_funding_params')
for param_name, param_value in query_parameters.items():
async with AsyncClient(app=app, base_url='http://test', params={param_name: param_value}) as ac:
@@ -31,7 +32,7 @@ async def test_get_gender_params(event_loop, refresh_db):
assert response.status_code == 200
assert len(response.json()['data']) > 0, (
- 'There should be at least one gender entry for parameter '
+ 'There should be at least one funding entry for parameter '
f'"{param_name}" with value "{param_value}" in the database'
)
@@ -41,12 +42,12 @@ async def test_get_gender_params(event_loop, refresh_db):
assert response.status_code == 200
assert (
len(response.json()['data']) > 0
- ), 'There should be at least one gender entry for all parameters in the database'
+ ), 'There should be at least one funding entry for all parameters in the database'
@pytest.mark.asyncio
-async def test_get_gender_result(event_loop, refresh_db):
- log.info('started test_get_gender_result')
+async def test_get_funding_result(event_loop, refresh_db):
+ log.info('started test_get_funding_result')
async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac:
response = await ac.get(ENDPOINT_ROUTER)
@@ -54,6 +55,9 @@ async def test_get_gender_result(event_loop, refresh_db):
for field in expected_fields:
assert field in response.json()['data'][0], f'Field "{field}" not found in the response'
+ for field in response.json()['data'][0]:
+ assert field in expected_fields, f'Field "{field}" unexpected'
+
assert len(response.json()['data'][0]) == len(
expected_fields
), 'Response has a different number of fields than expected'
diff --git a/tests/test_endpoints/test_humanitarian_needs_endpoint.py b/tests/test_endpoints/test_humanitarian_needs_endpoint.py
index f8a9a376..4fd51ac3 100644
--- a/tests/test_endpoints/test_humanitarian_needs_endpoint.py
+++ b/tests/test_endpoints/test_humanitarian_needs_endpoint.py
@@ -8,7 +8,7 @@
log = logging.getLogger(__name__)
-ENDPOINT_ROUTER = '/api/themes/humanitarian_needs'
+ENDPOINT_ROUTER = '/api/v1/affected-people/humanitarian-needs'
endpoint_data = endpoint_data[ENDPOINT_ROUTER]
query_parameters = endpoint_data['query_parameters']
expected_fields = endpoint_data['expected_fields']
@@ -66,29 +66,34 @@ async def test_get_humanitarian_needs_adm_fields(event_loop, refresh_db):
log.info('started test_get_humanitarian_needs_adm_fields')
humanitarian_needs_view_adm_specified = HumanitarianNeedsResponse(
- gender_code='m',
- age_range_code='0-1',
- sector_code='ABC',
- sector_name='Sector Name',
- population_status_code='inneed',
- population_group_code='abcd',
- dataset_hdx_provider_stub='provider01',
- dataset_hdx_stub='test-dataset1',
- resource_hdx_id='test-resource1',
- hapi_updated_date='2023-01-01 00:00:00',
- hapi_replaced_date=None,
+ resource_hdx_id='17acb541-9431-409a-80a8-50eda7e8ebab',
+ gender='f',
+ age_range='1-2',
+ min_age=0,
+ max_age=99,
+ disabled_marker='y',
+ sector_code='EDU',
+ population_group='REF',
+ population_status='AFF',
+ population=500000,
+ reference_period_start='2023-01-01 00:00:00',
+ reference_period_end='2023-03-31 23:59:59',
+ sector_name='Education',
location_code='Foolandia',
location_name='FOO-XXX',
admin1_is_unspecified=False,
+ admin1_ref=1,
+ admin2_ref=1,
admin1_code='FOO-XXX',
admin1_name='Province 01',
admin2_is_unspecified=False,
admin2_code='FOO-XXX-XXX',
admin2_name='District A',
- reference_period_start='2023-01-01 00:00:00',
- reference_period_end='2023-03-31 23:59:59',
+ location_ref=2,
)
+ # constructing HumanitarianNeedsResponse above must not raise (validation check)
+
assert (
humanitarian_needs_view_adm_specified.admin1_code == 'FOO-XXX'
), 'admin1_code should keep its value when admin1_is_unspecified is False'
@@ -103,27 +108,30 @@ async def test_get_humanitarian_needs_adm_fields(event_loop, refresh_db):
), 'admin2_name should keep its value when admin1_is_unspecified is False'
humanitarian_needs_view_adm_unspecified = HumanitarianNeedsResponse(
- gender_code='f',
- age_range_code='1-2',
- sector_code='DEF',
- sector_name='Sector_name2',
- population_status_code='inneed',
- population_group_code='efgh',
- dataset_hdx_stub='test-dataset2',
- dataset_hdx_provider_stub='provider02',
- resource_hdx_id='test-resource1',
- hapi_updated_date='2023-01-01 00:00:00',
- hapi_replaced_date=None,
+ resource_hdx_id='17acb541-9431-409a-80a8-50eda7e8ebab',
+ gender='f',
+ age_range='1-2',
+ min_age=0,
+ max_age=99,
+ disabled_marker='y',
+ sector_code='EDU',
+ population_group='REF',
+ population_status='AFF',
+ population=500000,
+ reference_period_start='2023-01-01 00:00:00',
+ reference_period_end='2023-03-31 23:59:59',
+ sector_name='Education',
location_code='Foolandia',
location_name='FOO-XXX',
admin1_is_unspecified=True,
+ admin1_ref=1,
+ admin2_ref=1,
admin1_code='FOO-XXX',
- admin1_name='Unpecified',
+ admin1_name='Unspecified',
admin2_is_unspecified=True,
admin2_code='FOO-XXX',
admin2_name='Unspecified',
- reference_period_start='2023-01-01 00:00:00',
- reference_period_end='2023-03-31 23:59:59',
+ location_ref=2,
)
assert (
diff --git a/tests/test_endpoints/test_location_endpoint.py b/tests/test_endpoints/test_location_endpoint.py
index 7faa60d3..03880ad4 100644
--- a/tests/test_endpoints/test_location_endpoint.py
+++ b/tests/test_endpoints/test_location_endpoint.py
@@ -7,7 +7,7 @@
log = logging.getLogger(__name__)
-ENDPOINT_ROUTER = '/api/location'
+ENDPOINT_ROUTER = '/api/v1/metadata/location'
endpoint_data = endpoint_data[ENDPOINT_ROUTER]
query_parameters = endpoint_data['query_parameters']
expected_fields = endpoint_data['expected_fields']
diff --git a/tests/test_endpoints/test_national_risk_endpoint.py b/tests/test_endpoints/test_national_risk_endpoint.py
index 89657a9e..fd3eda60 100644
--- a/tests/test_endpoints/test_national_risk_endpoint.py
+++ b/tests/test_endpoints/test_national_risk_endpoint.py
@@ -7,7 +7,7 @@
log = logging.getLogger(__name__)
-ENDPOINT_ROUTER = '/api/themes/national_risk'
+ENDPOINT_ROUTER = '/api/v1/coordination-context/national-risk'
endpoint_data = endpoint_data[ENDPOINT_ROUTER]
query_parameters = endpoint_data['query_parameters']
expected_fields = endpoint_data['expected_fields']
diff --git a/tests/test_endpoints/test_operational_presence_endpoint.py b/tests/test_endpoints/test_operational_presence_endpoint.py
index a72d91e8..9ad47539 100644
--- a/tests/test_endpoints/test_operational_presence_endpoint.py
+++ b/tests/test_endpoints/test_operational_presence_endpoint.py
@@ -1,3 +1,4 @@
+from datetime import datetime
import pytest
import logging
@@ -8,7 +9,7 @@
log = logging.getLogger(__name__)
-ENDPOINT_ROUTER = '/api/themes/3W'
+ENDPOINT_ROUTER = '/api/v1/coordination-context/operational-presence'
endpoint_data = endpoint_data[ENDPOINT_ROUTER]
query_parameters = endpoint_data['query_parameters']
expected_fields = endpoint_data['expected_fields']
@@ -67,24 +68,24 @@ async def test_get_operational_presence_adm_fields(event_loop, refresh_db):
operational_presence_view_adm_specified = OperationalPresenceResponse(
sector_code='ABC',
- age_range_code='0-1',
- dataset_hdx_stub='test-dataset1',
resource_hdx_id='test-resource1',
- hapi_updated_date='2023-01-01 00:00:00',
- hapi_replaced_date=None,
org_acronym='ORG01',
org_name='Organisation 1',
+ org_type_code='unimportant',
sector_name='Sector Name',
+ location_ref=1,
location_code='Foolandia',
location_name='FOO-XXX',
+ admin1_ref=1,
admin1_is_unspecified=False,
admin1_code='FOO-XXX',
admin1_name='Province 01',
+ admin2_ref=1,
admin2_is_unspecified=False,
admin2_code='FOO-XXX-XXX',
admin2_name='District A',
- reference_period_start='2023-01-01 00:00:00',
- reference_period_end='2023-03-31 23:59:59',
+ reference_period_start=datetime.strptime('2023-01-01 00:00:00', '%Y-%m-%d %H:%M:%S'),
+ reference_period_end=datetime.strptime('2023-03-31 23:59:59', '%Y-%m-%d %H:%M:%S'),
)
assert (
@@ -102,24 +103,24 @@ async def test_get_operational_presence_adm_fields(event_loop, refresh_db):
operational_presence_view_adm_unspecified = OperationalPresenceResponse(
sector_code='ABC',
- age_range_code='0-1',
- dataset_hdx_stub='test-dataset1',
resource_hdx_id='test-resource1',
- hapi_updated_date='2023-01-01 00:00:00',
- hapi_replaced_date=None,
org_acronym='ORG01',
org_name='Organisation 1',
+ org_type_code='unimportant',
sector_name='Sector Name',
+ location_ref=1,
location_code='Foolandia',
location_name='FOO-XXX',
admin1_is_unspecified=True,
+ admin1_ref=1,
admin1_code='FOO-XXX',
admin1_name='Unpecified',
+ admin2_ref=1,
admin2_is_unspecified=True,
- admin2_code='FOO-XXX',
+ admin2_code='FOO-XXX-XXX',
admin2_name='Unspecified',
- reference_period_start='2023-01-01 00:00:00',
- reference_period_end='2023-03-31 23:59:59',
+ reference_period_start=datetime.strptime('2023-01-01 00:00:00', '%Y-%m-%d %H:%M:%S'),
+ reference_period_end=datetime.strptime('2023-03-31 23:59:59', '%Y-%m-%d %H:%M:%S'),
)
assert (
diff --git a/tests/test_endpoints/test_org_endpoint.py b/tests/test_endpoints/test_org_endpoint.py
index 25100716..31fa73a8 100644
--- a/tests/test_endpoints/test_org_endpoint.py
+++ b/tests/test_endpoints/test_org_endpoint.py
@@ -7,7 +7,7 @@
log = logging.getLogger(__name__)
-ENDPOINT_ROUTER = '/api/org'
+ENDPOINT_ROUTER = '/api/v1/metadata/org'
endpoint_data = endpoint_data[ENDPOINT_ROUTER]
query_parameters = endpoint_data['query_parameters']
expected_fields = endpoint_data['expected_fields']
diff --git a/tests/test_endpoints/test_org_type_endpoint.py b/tests/test_endpoints/test_org_type_endpoint.py
index 8cfe16fb..339f0290 100644
--- a/tests/test_endpoints/test_org_type_endpoint.py
+++ b/tests/test_endpoints/test_org_type_endpoint.py
@@ -7,7 +7,7 @@
log = logging.getLogger(__name__)
-ENDPOINT_ROUTER = '/api/org_type'
+ENDPOINT_ROUTER = '/api/v1/metadata/org_type'
endpoint_data = endpoint_data[ENDPOINT_ROUTER]
query_parameters = endpoint_data['query_parameters']
expected_fields = endpoint_data['expected_fields']
diff --git a/tests/test_endpoints/test_output_format.py b/tests/test_endpoints/test_output_format.py
index a5522189..80fc09e1 100644
--- a/tests/test_endpoints/test_output_format.py
+++ b/tests/test_endpoints/test_output_format.py
@@ -11,23 +11,23 @@
# query_parameters = endpoint_data['query_parameters']
# expected_fields = endpoint_data['expected_fields']
ENDPOINT_ROUTER_LIST = [
- '/api/v1/admin1',
- '/api/v1/admin2',
- '/api/v1/age_range',
- '/api/v1/dataset',
- '/api/v1/gender',
- '/api/v1/location',
- '/api/v1/themes/3W',
- '/api/v1/org',
- '/api/v1/org_type',
- '/api/v1/themes/population',
- '/api/v1/population_group',
- '/api/v1/population_status',
- '/api/v1/themes/food_security',
- '/api/v1/themes/national_risk',
- '/api/v1/themes/humanitarian_needs',
- '/api/v1/resource',
- '/api/v1/sector',
+ '/api/v1/metadata/admin1',
+ '/api/v1/metadata/admin2',
+ #'/api/v1/age_range',
+ '/api/v1/metadata/dataset',
+ #'/api/v1/gender',
+ '/api/v1/metadata/location',
+ #'/api/v1/themes/3W',
+ '/api/v1/metadata/org',
+ '/api/v1/metadata/org_type',
+ '/api/v1/population-social/population',
+ #'/api/v1/population_group',
+ #'/api/v1/population_status',
+ #'/api/v1/themes/food_security',
+ #'/api/v1/themes/national_risk',
+ #'/api/v1/themes/humanitarian_needs',
+ '/api/v1/metadata/resource',
+ '/api/v1/metadata/sector',
]
diff --git a/tests/test_endpoints/test_population_endpoint.py b/tests/test_endpoints/test_population_endpoint.py
index 5c323e31..b1813f76 100644
--- a/tests/test_endpoints/test_population_endpoint.py
+++ b/tests/test_endpoints/test_population_endpoint.py
@@ -2,13 +2,15 @@
import logging
from httpx import AsyncClient
+from hapi_schema.utils.enums import Gender
+
from hdx_hapi.endpoints.models.population import PopulationResponse
from main import app
from tests.test_endpoints.endpoint_data import endpoint_data
log = logging.getLogger(__name__)
-ENDPOINT_ROUTER = '/api/themes/population'
+ENDPOINT_ROUTER = '/api/v1/population-social/population'
endpoint_data = endpoint_data[ENDPOINT_ROUTER]
query_parameters = endpoint_data['query_parameters']
expected_fields = endpoint_data['expected_fields']
@@ -31,20 +33,13 @@ async def test_get_population_params(event_loop, refresh_db):
async with AsyncClient(app=app, base_url='http://test', params={param_name: param_value}) as ac:
response = await ac.get(ENDPOINT_ROUTER)
+ log.info(f'{param_name}:{param_value} - {len(response.json()["data"]) } rows')
assert response.status_code == 200
assert len(response.json()['data']) > 0, (
f'There should be at least one population entry for parameter "{param_name}" with value "{param_value}" '
'in the database'
)
- async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac:
- response = await ac.get(ENDPOINT_ROUTER)
-
- assert response.status_code == 200
- assert (
- len(response.json()['data']) > 0
- ), 'There should be at least one population entry for all parameters in the database'
-
@pytest.mark.asyncio
async def test_get_population_result(event_loop, refresh_db):
@@ -64,27 +59,27 @@ async def test_get_population_result(event_loop, refresh_db):
@pytest.mark.asyncio
async def test_get_population_adm_fields(event_loop, refresh_db):
log.info('started test_get_population_adm_fields')
-
population_view_adm_specified = PopulationResponse(
- gender_code='f',
- age_range_code='0-1',
- population=1,
- dataset_hdx_stub='test-dataset1',
- resource_hdx_id='test-resource1',
- hapi_updated_date='2023-01-01 00:00:00',
- hapi_replaced_date=None,
- location_code='Foolandia',
- location_name='FOO-XXX',
- admin1_is_unspecified=False,
+ resource_hdx_id='foo',
+ admin2_ref=1,
+ gender=Gender.MALE,
+ age_range='10-14',
+ min_age=10,
+ max_age=14,
+ population=100,
+ reference_period_start='2023-01-01 00:00:00',
+ reference_period_end='2023-03-31 23:59:59',
+ location_ref=1,
+ location_code='FOO',
+ location_name='Foolandia',
+ admin1_ref=1,
admin1_code='FOO-XXX',
admin1_name='Province 01',
- admin2_is_unspecified=False,
+ admin1_is_unspecified=False,
admin2_code='FOO-XXX-XXX',
admin2_name='District A',
- reference_period_start='2023-01-01 00:00:00',
- reference_period_end='2023-03-31 23:59:59',
+ admin2_is_unspecified=False,
)
-
assert (
population_view_adm_specified.admin1_code == 'FOO-XXX'
), 'admin1_code should keep its value when admin1_is_unspecified is False'
@@ -99,23 +94,25 @@ async def test_get_population_adm_fields(event_loop, refresh_db):
), 'admin2_name should keep its value when admin1_is_unspecified is False'
population_view_adm_unspecified = PopulationResponse(
- gender_code='f',
- age_range_code='0-1',
- population=1,
- dataset_hdx_stub='test-dataset1',
- resource_hdx_id='test-resource1',
- hapi_updated_date='2023-01-01 00:00:00',
- hapi_replaced_date=None,
- location_code='Foolandia',
- location_name='FOO-XXX',
- admin1_is_unspecified=True,
- admin1_code='FOO-XXX',
- admin1_name='Unpecified',
- admin2_is_unspecified=True,
- admin2_code='FOO-XXX',
- admin2_name='Unspecified',
+ resource_hdx_id='foo',
+ admin2_ref=1,
+ gender=Gender.MALE,
+ age_range='10-14',
+ min_age=10,
+ max_age=14,
+ population=100,
reference_period_start='2023-01-01 00:00:00',
reference_period_end='2023-03-31 23:59:59',
+ location_ref=1,
+ location_code='FOO',
+ location_name='Foolandia',
+ admin1_ref=1,
+ admin1_code='FOO-XXX',
+ admin1_name='Unspecified',
+ admin1_is_unspecified=True,
+ admin2_code='FOO-XXX-XXX',
+ admin2_name='Unspecified',
+ admin2_is_unspecified=True,
)
assert (
@@ -162,6 +159,9 @@ async def test_get_population_admin_level(event_loop, refresh_db):
'2': admin_2_count,
}
+ for item in response_items:
+ log.info(f"{item['admin1_name']}, {item['admin2_name']}")
+ log.info(counts_map)
for admin_level, count in counts_map.items():
async with AsyncClient(app=app, base_url='http://test', params={'admin_level': admin_level}) as ac:
response = await ac.get(ENDPOINT_ROUTER)
diff --git a/tests/test_endpoints/test_population_group_endpoint.py b/tests/test_endpoints/test_population_group_endpoint.py
deleted file mode 100644
index 121b3dfa..00000000
--- a/tests/test_endpoints/test_population_group_endpoint.py
+++ /dev/null
@@ -1,60 +0,0 @@
-import pytest
-import logging
-
-from httpx import AsyncClient
-from main import app
-from tests.test_endpoints.endpoint_data import endpoint_data
-
-log = logging.getLogger(__name__)
-
-ENDPOINT_ROUTER = '/api/population_group'
-endpoint_data = endpoint_data[ENDPOINT_ROUTER]
-query_parameters = endpoint_data['query_parameters']
-expected_fields = endpoint_data['expected_fields']
-
-
-@pytest.mark.asyncio
-async def test_get_population_group(event_loop, refresh_db):
- log.info('started test_get_population_group')
- async with AsyncClient(app=app, base_url='http://test') as ac:
- response = await ac.get(ENDPOINT_ROUTER)
- assert response.status_code == 200
- assert len(response.json()['data']) > 0, 'There should be at least one population group in the database'
-
-
-@pytest.mark.asyncio
-async def test_get_population_group_params(event_loop, refresh_db):
- log.info('started test_get_population_group_params')
-
- for param_name, param_value in query_parameters.items():
- async with AsyncClient(app=app, base_url='http://test', params={param_name: param_value}) as ac:
- response = await ac.get(ENDPOINT_ROUTER)
-
- assert response.status_code == 200
- assert len(response.json()['data']) > 0, (
- f'There should be at least one population group entry for parameter "{param_name}" '
- f'with value "{param_value}" in the database'
- )
-
- async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac:
- response = await ac.get(ENDPOINT_ROUTER)
-
- assert response.status_code == 200
- assert (
- len(response.json()['data']) > 0
- ), 'There should be at least one population group for all parameters in the database'
-
-
-@pytest.mark.asyncio
-async def test_get_population_group_result(event_loop, refresh_db):
- log.info('started test_get_population_group_result')
-
- async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac:
- response = await ac.get(ENDPOINT_ROUTER)
-
- for field in expected_fields:
- assert field in response.json()['data'][0], f'Field "{field}" not found in the response'
-
- assert len(response.json()['data'][0]) == len(
- expected_fields
- ), 'Response has a different number of fields than expected'
diff --git a/tests/test_endpoints/test_population_status_endpoint.py b/tests/test_endpoints/test_population_status_endpoint.py
deleted file mode 100644
index 9024478a..00000000
--- a/tests/test_endpoints/test_population_status_endpoint.py
+++ /dev/null
@@ -1,60 +0,0 @@
-import pytest
-import logging
-
-from httpx import AsyncClient
-from main import app
-from tests.test_endpoints.endpoint_data import endpoint_data
-
-log = logging.getLogger(__name__)
-
-ENDPOINT_ROUTER = '/api/population_status'
-endpoint_data = endpoint_data[ENDPOINT_ROUTER]
-query_parameters = endpoint_data['query_parameters']
-expected_fields = endpoint_data['expected_fields']
-
-
-@pytest.mark.asyncio
-async def test_get_population_status(event_loop, refresh_db):
- log.info('started test_get_population_status')
- async with AsyncClient(app=app, base_url='http://test') as ac:
- response = await ac.get(ENDPOINT_ROUTER)
- assert response.status_code == 200
- assert len(response.json()['data']) > 0, 'There should be at least one population status in the database'
-
-
-@pytest.mark.asyncio
-async def test_get_population_status_params(event_loop, refresh_db):
- log.info('started test_get_population_status_params')
-
- for param_name, param_value in query_parameters.items():
- async with AsyncClient(app=app, base_url='http://test', params={param_name: param_value}) as ac:
- response = await ac.get(ENDPOINT_ROUTER)
-
- assert response.status_code == 200
- assert len(response.json()['data']) > 0, (
- f'There should be at least one population status entry for parameter "{param_name}" '
- f'with value "{param_value}" in the database'
- )
-
- async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac:
- response = await ac.get(ENDPOINT_ROUTER)
-
- assert response.status_code == 200
- assert (
- len(response.json()['data']) > 0
- ), 'There should be at least one population status for all parameters in the database'
-
-
-@pytest.mark.asyncio
-async def test_get_population_status_result(event_loop, refresh_db):
- log.info('started test_get_population_status_result')
-
- async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac:
- response = await ac.get(ENDPOINT_ROUTER)
-
- for field in expected_fields:
- assert field in response.json()['data'][0], f'Field "{field}" not found in the response'
-
- assert len(response.json()['data'][0]) == len(
- expected_fields
- ), 'Response has a different number of fields than expected'
diff --git a/tests/test_endpoints/test_age_range_endpoint.py b/tests/test_endpoints/test_refugees.py
similarity index 71%
rename from tests/test_endpoints/test_age_range_endpoint.py
rename to tests/test_endpoints/test_refugees.py
index b2f2e67f..a8409ca8 100644
--- a/tests/test_endpoints/test_age_range_endpoint.py
+++ b/tests/test_endpoints/test_refugees.py
@@ -7,24 +7,24 @@
log = logging.getLogger(__name__)
-ENDPOINT_ROUTER = '/api/age_range'
+ENDPOINT_ROUTER = '/api/v1/affected-people/refugees'
endpoint_data = endpoint_data[ENDPOINT_ROUTER]
query_parameters = endpoint_data['query_parameters']
expected_fields = endpoint_data['expected_fields']
@pytest.mark.asyncio
-async def test_get_age_ranges(event_loop, refresh_db):
- log.info('started test_get_age_ranges')
+async def test_get_refugees(event_loop, refresh_db):
+ log.info('started test_get_refugees')
async with AsyncClient(app=app, base_url='http://test') as ac:
response = await ac.get(ENDPOINT_ROUTER)
assert response.status_code == 200
- assert len(response.json()['data']) > 0, 'There should be at least one age range in the database'
+ assert len(response.json()['data']) > 0, 'There should be at least one refugees entry in the database'
@pytest.mark.asyncio
-async def test_get_age_range_params(event_loop, refresh_db):
- log.info('started test_get_age_range_params')
+async def test_get_refugee_params(event_loop, refresh_db):
+ log.info('started test_get_refugee_params')
for param_name, param_value in query_parameters.items():
async with AsyncClient(app=app, base_url='http://test', params={param_name: param_value}) as ac:
@@ -32,8 +32,8 @@ async def test_get_age_range_params(event_loop, refresh_db):
assert response.status_code == 200
assert len(response.json()['data']) > 0, (
- 'There should be at least one age_range entry for parameter '
- f'"{param_name}" with value "{param_value}" in the database'
+ f'There should be at least one refugee entry for parameter "{param_name}" with value "{param_value}" '
+ 'in the database'
)
async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac:
@@ -42,12 +42,12 @@ async def test_get_age_range_params(event_loop, refresh_db):
assert response.status_code == 200
assert (
len(response.json()['data']) > 0
- ), 'There should be at least one age_range entry for all parameters in the database'
+ ), 'There should be at least one refugee entry for all parameters in the database'
@pytest.mark.asyncio
-async def test_get_age_range_result(event_loop, refresh_db):
- log.info('started test_get_age_range_result')
+async def test_get_refugee_result(event_loop, refresh_db):
+ log.info('started test_get_refugee_result')
async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac:
response = await ac.get(ENDPOINT_ROUTER)
diff --git a/tests/test_endpoints/test_resource_endpoint.py b/tests/test_endpoints/test_resource_endpoint.py
index 2da1b5ca..2ac9e858 100644
--- a/tests/test_endpoints/test_resource_endpoint.py
+++ b/tests/test_endpoints/test_resource_endpoint.py
@@ -7,7 +7,7 @@
log = logging.getLogger(__name__)
-ENDPOINT_ROUTER = '/api/resource'
+ENDPOINT_ROUTER = '/api/v1/metadata/resource'
endpoint_data = endpoint_data[ENDPOINT_ROUTER]
query_parameters = endpoint_data['query_parameters']
expected_fields = endpoint_data['expected_fields']
@@ -55,6 +55,9 @@ async def test_get_resource_result(event_loop, refresh_db):
for field in expected_fields:
assert field in response.json()['data'][0], f'Field "{field}" not found in the response'
+ for field in response.json()['data'][0]:
+ assert field in expected_fields, f'Field "{field}" unexpected'
+
assert len(response.json()['data'][0]) == len(
expected_fields
), 'Response has a different number of fields than expected'
diff --git a/tests/test_endpoints/test_sector_endpoint.py b/tests/test_endpoints/test_sector_endpoint.py
index 4ba37564..11933ce9 100644
--- a/tests/test_endpoints/test_sector_endpoint.py
+++ b/tests/test_endpoints/test_sector_endpoint.py
@@ -7,7 +7,7 @@
log = logging.getLogger(__name__)
-ENDPOINT_ROUTER = '/api/sector'
+ENDPOINT_ROUTER = '/api/v1/metadata/sector'
endpoint_data = endpoint_data[ENDPOINT_ROUTER]
query_parameters = endpoint_data['query_parameters']
expected_fields = endpoint_data['expected_fields']