diff --git a/.gitignore b/.gitignore index cc348ef7..8d4aff37 100644 --- a/.gitignore +++ b/.gitignore @@ -17,4 +17,3 @@ venv/ coverage.xml *.pg_restore - diff --git a/.vscode/launch.json b/.vscode/launch.json index 379fed4a..88827aa0 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -12,5 +12,6 @@ "console": "integratedTerminal", "justMyCode": true, } - ] -} \ No newline at end of file + ], + +} diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index f2332707..6dd37943 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,6 +1,6 @@ # Contributing -The Humanitarian API (HAPI) is being developed by a team from the [Centre for Humanitarian Data](https://centre.humdata.org/). +The Humanitarian API (HDX HAPI) is being developed by a team from the [Centre for Humanitarian Data](https://centre.humdata.org/). HDX developers are using [Visual Code](https://code.visualstudio.com/) as a standard IDE for this project with development taking place inside Docker containers. @@ -26,15 +26,15 @@ docker-compose exec -T hapi sh -c "pytest --log-level=INFO --cov=. --cov-report As an integration test the `docs` endpoint is inspected "manually". -A local copy of HAPI can be run by importing a snapshot of the database using the following shell script invocation in the host machine. +A local copy of HDX HAPI can be run by importing a snapshot of the database using the following shell script invocation on the host machine. ```shell ./restore_database.sh https://github.com/OCHA-DAP/hapi-pipelines/raw/db-export/database/hapi_db.pg_restore hapi ``` -The HAPI application can then be launched using the `start` launch configuration in Visual Code, this serves the documentation at `http://localhost:8844/docs` and the API at `http://localhost:8844/api` in the host machine. +The HDX HAPI application can then be launched using the `start` launch configuration in Visual Code; this serves the documentation at `http://localhost:8844/docs` and the API at `http://localhost:8844/api` on the host machine.
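+To sanity-check the launched instance, a quick smoke test against the local API can be useful. The sketch below is illustrative only: it assumes the local server is running on port 8844 as described above and that it exposes the same `/api/v1/metadata/sector` endpoint as the hosted API.
+
+```python
+# Minimal smoke test for a local HDX HAPI instance (a sketch; the endpoint
+# path and response shape are assumptions based on the hosted API docs).
+import json
+import urllib.request
+
+URL = 'http://localhost:8844/api/v1/metadata/sector?output_format=json&offset=0&limit=1'
+
+with urllib.request.urlopen(URL) as response:
+    payload = json.loads(response.read())
+
+print(payload)
+```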
-The HAPI database can be accessed locally with the following connection details: +The HDX HAPI database can be accessed locally with the following connection details: ``` URL: jdbc:postgresql://localhost:45432/hapi diff --git a/alembic/env.py b/alembic/env.py index ced68b9a..e3c29200 100644 --- a/alembic/env.py +++ b/alembic/env.py @@ -23,24 +23,24 @@ from hdx_hapi.db.models.base import Base from hapi_schema.db_admin1 import DBAdmin1 from hapi_schema.db_admin2 import DBAdmin2 -from hapi_schema.db_location import DBLocation from hapi_schema.db_age_range import DBAgeRange -from hapi_schema.db_gender import DBGender from hapi_schema.db_dataset import DBDataset -from hapi_schema.db_resource import DBResource -from hapi_schema.db_org import DBOrg -from hapi_schema.db_org_type import DBOrgType -from hapi_schema.db_sector import DBSector from hapi_schema.db_food_security import DBFoodSecurity +from hapi_schema.db_gender import DBGender from hapi_schema.db_humanitarian_needs import DBHumanitarianNeeds from hapi_schema.db_ipc_phase import DBIpcPhase from hapi_schema.db_ipc_type import DBIpcType +from hapi_schema.db_location import DBLocation from hapi_schema.db_national_risk import DBNationalRisk -from hapi_schema.db_population_group import DBPopulationGroup -from hapi_schema.db_population_status import DBPopulationStatus - from hapi_schema.db_operational_presence import DBOperationalPresence +from hapi_schema.db_org import DBOrg +from hapi_schema.db_org_type import DBOrgType +from hapi_schema.db_patch import DBPatch from hapi_schema.db_population import DBPopulation +from hapi_schema.db_population_group import DBPopulationGroup +from hapi_schema.db_population_status import DBPopulationStatus +from hapi_schema.db_resource import DBResource +from hapi_schema.db_sector import DBSector target_metadata = Base.metadata # target_metadata = None @@ -56,7 +56,7 @@ def _get_db_uri() -> str: db_url = db_url_dict.get('sqlalchemy.url') if db_url_dict else None if not db_url: db_url = f'postgresql+psycopg2://{create_pg_uri_from_env_without_protocol()}' - # print(f'My db url is {x_url}') + # print(f'My db url is {db_url}') return db_url diff --git a/alembic/versions/5ea413542a49_create_views.py b/alembic/versions/2d6db74775b5_create_views.py similarity index 68% rename from alembic/versions/5ea413542a49_create_views.py rename to alembic/versions/2d6db74775b5_create_views.py index 99e0fd67..677602b5 100644 --- a/alembic/versions/5ea413542a49_create_views.py +++ b/alembic/versions/2d6db74775b5_create_views.py @@ -1,8 +1,8 @@ """create views -Revision ID: 5ea413542a49 -Revises: be60e42db4db -Create Date: 2023-09-07 20:31:02.198042 +Revision ID: 2d6db74775b5 +Revises: 927d2ce143cc +Create Date: 2024-04-25 15:02:49.678672 """ from typing import Sequence, Union @@ -22,11 +22,11 @@ from hapi_schema.db_location import view_params_location from hapi_schema.db_national_risk import view_params_national_risk from hapi_schema.db_operational_presence import view_params_operational_presence -from hapi_schema.db_org_type import view_params_org_type from hapi_schema.db_org import view_params_org +from hapi_schema.db_org_type import view_params_org_type +from hapi_schema.db_population import view_params_population from hapi_schema.db_population_group import view_params_population_group from hapi_schema.db_population_status import view_params_population_status -from hapi_schema.db_population import view_params_population from hapi_schema.db_resource import view_params_resource from hapi_schema.db_sector import view_params_sector 
@@ -34,19 +34,32 @@ # revision identifiers, used by Alembic. -revision: str = '5ea413542a49' -down_revision: Union[str, None] = 'be60e42db4db' +revision: str = '2d6db74775b5' +down_revision: Union[str, None] = '927d2ce143cc' branch_labels: Union[str, Sequence[str], None] = None depends_on: Union[str, Sequence[str], None] = None VIEW_LIST = [ - view_params_admin1, view_params_admin2, view_params_age_range, view_params_dataset, - view_params_food_security, view_params_gender, view_params_humanitarian_needs, - view_params_ipc_phase, view_params_ipc_type, view_params_location, view_params_national_risk, - view_params_operational_presence, view_params_org_type, view_params_org, - view_params_population_group, view_params_population_status, view_params_population, - view_params_resource, view_params_sector + view_params_admin1, + view_params_admin2, + view_params_age_range, + view_params_dataset, + view_params_food_security, + view_params_gender, + view_params_humanitarian_needs, + view_params_ipc_phase, + view_params_ipc_type, + view_params_location, + view_params_national_risk, + view_params_operational_presence, + view_params_org_type, + view_params_org, + view_params_population_group, + view_params_population_status, + view_params_population, + view_params_resource, + view_params_sector, ] @@ -57,5 +70,4 @@ def upgrade() -> None: def downgrade() -> None: for v in VIEW_LIST: - op.get_bind().execute(DropView(v.name)) - + op.get_bind().execute(DropView(v.name)) diff --git a/alembic/versions/be60e42db4db_initialization.py b/alembic/versions/927d2ce143cc_initialization.py similarity index 58% rename from alembic/versions/be60e42db4db_initialization.py rename to alembic/versions/927d2ce143cc_initialization.py index 81ac1a6a..9b09064c 100644 --- a/alembic/versions/be60e42db4db_initialization.py +++ b/alembic/versions/927d2ce143cc_initialization.py @@ -1,8 +1,8 @@ -"""Initialization +"""initialization -Revision ID: be60e42db4db +Revision ID: 927d2ce143cc Revises: -Create Date: 2023-09-20 12:43:26.000600 +Create Date: 2024-04-25 15:02:30.586918 """ from typing import Sequence, Union @@ -12,7 +12,7 @@ # revision identifiers, used by Alembic. 
-revision: str = 'be60e42db4db' +revision: str = '927d2ce143cc' down_revision: Union[str, None] = None branch_labels: Union[str, Sequence[str], None] = None depends_on: Union[str, Sequence[str], None] = None @@ -37,60 +37,119 @@ def upgrade() -> None: sa.UniqueConstraint('hdx_id'), sa.UniqueConstraint('hdx_stub') ) - op.create_index(op.f('ix_dataset_hdx_provider_stub'), 'dataset', ['hdx_provider_stub'], unique=False) op.create_index(op.f('ix_dataset_hdx_provider_name'), 'dataset', ['hdx_provider_name'], unique=False) + op.create_index(op.f('ix_dataset_hdx_provider_stub'), 'dataset', ['hdx_provider_stub'], unique=False) op.create_table('gender', sa.Column('code', sa.String(length=1), nullable=False), sa.Column('description', sa.String(length=256), nullable=False), sa.PrimaryKeyConstraint('code') ) + op.create_table('ipc_phase', + sa.Column('code', sa.String(length=32), nullable=False), + sa.Column('name', sa.String(length=32), nullable=False), + sa.Column('description', sa.String(length=512), nullable=False), + sa.CheckConstraint("code IN ('1', '2', '3', '4', '5', '3+', 'all')", name='ipc_phase_code'), + sa.PrimaryKeyConstraint('code') + ) + op.create_table('ipc_type', + sa.Column('code', sa.String(length=32), nullable=False), + sa.Column('description', sa.String(length=512), nullable=False), + sa.CheckConstraint("code IN ('current', 'first projection', 'second projection')", name='ipc_phase_type'), + sa.PrimaryKeyConstraint('code') + ) op.create_table('location', sa.Column('id', sa.Integer(), nullable=False), sa.Column('code', sa.String(length=128), nullable=False), sa.Column('name', sa.String(length=512), nullable=False), - sa.Column('reference_period_start', sa.DateTime(), nullable=False), + sa.Column('reference_period_start', sa.DateTime(), server_default=sa.text('NULL'), nullable=True), sa.Column('reference_period_end', sa.DateTime(), server_default=sa.text('NULL'), nullable=True), + sa.Column('hapi_updated_date', sa.DateTime(), nullable=False), + sa.Column('hapi_replaced_date', sa.DateTime(), server_default=sa.text('NULL'), nullable=True), + sa.CheckConstraint('(hapi_replaced_date IS NULL) OR (hapi_replaced_date >= hapi_updated_date)', name='hapi_dates'), + sa.CheckConstraint('(reference_period_end >= reference_period_start) OR (reference_period_start IS NULL)', name='reference_period'), sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('code') + sa.UniqueConstraint('code', 'hapi_updated_date') ) + op.create_index(op.f('ix_location_hapi_replaced_date'), 'location', ['hapi_replaced_date'], unique=False) + op.create_index(op.f('ix_location_hapi_updated_date'), 'location', ['hapi_updated_date'], unique=False) + op.create_index(op.f('ix_location_reference_period_end'), 'location', ['reference_period_end'], unique=False) + op.create_index(op.f('ix_location_reference_period_start'), 'location', ['reference_period_start'], unique=False) op.create_table('org_type', sa.Column('code', sa.String(length=32), nullable=False), sa.Column('description', sa.String(length=512), nullable=False), sa.PrimaryKeyConstraint('code') ) + op.create_index(op.f('ix_org_type_description'), 'org_type', ['description'], unique=False) + op.create_table('patch', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('patch_sequence_number', sa.Integer(), nullable=False), + sa.Column('commit_hash', sa.String(length=48), nullable=False), + sa.Column('commit_date', sa.DateTime(), nullable=False), + sa.Column('patch_path', sa.String(length=128), nullable=False), + sa.Column('permanent_download_url', 
sa.String(length=1024), nullable=False), + sa.Column('state', sa.Enum('discovered', 'executed', 'failed', 'canceled', name='stateenum'), nullable=False), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('commit_hash'), + sa.UniqueConstraint('permanent_download_url') + ) + op.create_index(op.f('ix_patch_patch_sequence_number'), 'patch', ['patch_sequence_number'], unique=False) + op.create_index(op.f('ix_patch_state'), 'patch', ['state'], unique=False) + op.create_table('population_group', + sa.Column('code', sa.String(length=32), nullable=False), + sa.Column('description', sa.String(length=512), nullable=False), + sa.PrimaryKeyConstraint('code') + ) + op.create_index(op.f('ix_population_group_description'), 'population_group', ['description'], unique=False) + op.create_table('population_status', + sa.Column('code', sa.String(length=32), nullable=False), + sa.Column('description', sa.String(length=512), nullable=False), + sa.PrimaryKeyConstraint('code') + ) + op.create_index(op.f('ix_population_status_description'), 'population_status', ['description'], unique=False) op.create_table('sector', sa.Column('code', sa.String(length=32), nullable=False), sa.Column('name', sa.String(length=512), nullable=False), - sa.Column('reference_period_start', sa.DateTime(), nullable=False), - sa.Column('reference_period_end', sa.DateTime(), server_default=sa.text('NULL'), nullable=True), sa.PrimaryKeyConstraint('code') ) op.create_index(op.f('ix_sector_name'), 'sector', ['name'], unique=False) - op.create_index(op.f('ix_sector_reference_period_start'), 'sector', ['reference_period_start'], unique=False) op.create_table('admin1', sa.Column('id', sa.Integer(), nullable=False), sa.Column('location_ref', sa.Integer(), nullable=False), sa.Column('code', sa.String(length=128), nullable=False), sa.Column('name', sa.String(length=512), nullable=False), sa.Column('is_unspecified', sa.Boolean(), server_default=sa.text('FALSE'), nullable=False), - sa.Column('reference_period_start', sa.DateTime(), nullable=False), + sa.Column('reference_period_start', sa.DateTime(), server_default=sa.text('NULL'), nullable=True), sa.Column('reference_period_end', sa.DateTime(), server_default=sa.text('NULL'), nullable=True), + sa.Column('hapi_updated_date', sa.DateTime(), nullable=False), + sa.Column('hapi_replaced_date', sa.DateTime(), server_default=sa.text('NULL'), nullable=True), + sa.CheckConstraint('(hapi_replaced_date IS NULL) OR (hapi_replaced_date >= hapi_updated_date)', name='hapi_dates'), + sa.CheckConstraint('(reference_period_end >= reference_period_start) OR (reference_period_start IS NULL)', name='reference_period'), sa.ForeignKeyConstraint(['location_ref'], ['location.id'], onupdate='CASCADE', ondelete='CASCADE'), sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('code') + sa.UniqueConstraint('code', 'hapi_updated_date') ) + op.create_index(op.f('ix_admin1_hapi_replaced_date'), 'admin1', ['hapi_replaced_date'], unique=False) + op.create_index(op.f('ix_admin1_hapi_updated_date'), 'admin1', ['hapi_updated_date'], unique=False) + op.create_index(op.f('ix_admin1_reference_period_end'), 'admin1', ['reference_period_end'], unique=False) + op.create_index(op.f('ix_admin1_reference_period_start'), 'admin1', ['reference_period_start'], unique=False) op.create_table('org', sa.Column('id', sa.Integer(), nullable=False), - # sa.Column('hdx_link', sa.String(length=1024), nullable=False), sa.Column('acronym', sa.String(length=32), nullable=False), sa.Column('name', sa.String(length=512), nullable=False), - 
sa.Column('org_type_code', sa.String(length=32), nullable=False), - sa.Column('reference_period_start', sa.DateTime(), nullable=False), + sa.Column('org_type_code', sa.String(length=32), nullable=True), + sa.Column('reference_period_start', sa.DateTime(), server_default=sa.text('NULL'), nullable=True), sa.Column('reference_period_end', sa.DateTime(), server_default=sa.text('NULL'), nullable=True), + sa.Column('hapi_updated_date', sa.DateTime(), nullable=False), + sa.Column('hapi_replaced_date', sa.DateTime(), server_default=sa.text('NULL'), nullable=True), + sa.CheckConstraint('(hapi_replaced_date IS NULL) OR (hapi_replaced_date >= hapi_updated_date)', name='hapi_dates'), + sa.CheckConstraint('(reference_period_end >= reference_period_start) OR (reference_period_start IS NULL)', name='reference_period'), sa.ForeignKeyConstraint(['org_type_code'], ['org_type.code'], onupdate='CASCADE', ondelete='CASCADE'), sa.PrimaryKeyConstraint('id') ) op.create_index(op.f('ix_org_acronym'), 'org', ['acronym'], unique=False) + op.create_index(op.f('ix_org_hapi_replaced_date'), 'org', ['hapi_replaced_date'], unique=False) + op.create_index(op.f('ix_org_hapi_updated_date'), 'org', ['hapi_updated_date'], unique=False) + op.create_index(op.f('ix_org_reference_period_end'), 'org', ['reference_period_end'], unique=False) op.create_index(op.f('ix_org_reference_period_start'), 'org', ['reference_period_start'], unique=False) op.create_table('resource', sa.Column('id', sa.Integer(), nullable=False), @@ -101,11 +160,16 @@ def upgrade() -> None: sa.Column('update_date', sa.DateTime(), nullable=False), sa.Column('download_url', sa.String(length=1024), nullable=False), sa.Column('is_hxl', sa.Boolean(), nullable=False), + sa.Column('hapi_updated_date', sa.DateTime(), nullable=False), + sa.Column('hapi_replaced_date', sa.DateTime(), server_default=sa.text('NULL'), nullable=True), + sa.CheckConstraint('(hapi_replaced_date IS NULL) OR (hapi_replaced_date >= hapi_updated_date)', name='hapi_dates'), sa.ForeignKeyConstraint(['dataset_ref'], ['dataset.id'], onupdate='CASCADE', ondelete='CASCADE'), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('download_url'), sa.UniqueConstraint('hdx_id') ) + op.create_index(op.f('ix_resource_hapi_replaced_date'), 'resource', ['hapi_replaced_date'], unique=False) + op.create_index(op.f('ix_resource_hapi_updated_date'), 'resource', ['hapi_updated_date'], unique=False) op.create_index(op.f('ix_resource_is_hxl'), 'resource', ['is_hxl'], unique=False) op.create_index(op.f('ix_resource_update_date'), 'resource', ['update_date'], unique=False) op.create_table('admin2', @@ -114,71 +178,70 @@ def upgrade() -> None: sa.Column('code', sa.String(length=128), nullable=False), sa.Column('name', sa.String(length=512), nullable=False), sa.Column('is_unspecified', sa.Boolean(), server_default=sa.text('FALSE'), nullable=False), - sa.Column('reference_period_start', sa.DateTime(), nullable=False), + sa.Column('reference_period_start', sa.DateTime(), server_default=sa.text('NULL'), nullable=True), sa.Column('reference_period_end', sa.DateTime(), server_default=sa.text('NULL'), nullable=True), + sa.Column('hapi_updated_date', sa.DateTime(), nullable=False), + sa.Column('hapi_replaced_date', sa.DateTime(), server_default=sa.text('NULL'), nullable=True), + sa.CheckConstraint('(hapi_replaced_date IS NULL) OR (hapi_replaced_date >= hapi_updated_date)', name='hapi_dates'), + sa.CheckConstraint('(reference_period_end >= reference_period_start) OR (reference_period_start IS NULL)', name='reference_period'), 
sa.ForeignKeyConstraint(['admin1_ref'], ['admin1.id'], onupdate='CASCADE', ondelete='CASCADE'), sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('code') + sa.UniqueConstraint('code', 'hapi_updated_date') ) - op.create_table('operational_presence', + op.create_index(op.f('ix_admin2_hapi_replaced_date'), 'admin2', ['hapi_replaced_date'], unique=False) + op.create_index(op.f('ix_admin2_hapi_updated_date'), 'admin2', ['hapi_updated_date'], unique=False) + op.create_index(op.f('ix_admin2_reference_period_end'), 'admin2', ['reference_period_end'], unique=False) + op.create_index(op.f('ix_admin2_reference_period_start'), 'admin2', ['reference_period_start'], unique=False) + op.create_table('food_security', sa.Column('id', sa.Integer(), nullable=False), sa.Column('resource_ref', sa.Integer(), nullable=False), - sa.Column('org_ref', sa.Integer(), nullable=False), - sa.Column('sector_code', sa.String(length=32), nullable=False), sa.Column('admin2_ref', sa.Integer(), nullable=False), + sa.Column('ipc_phase_code', sa.String(length=32), nullable=False), + sa.Column('ipc_type_code', sa.String(length=32), nullable=False), + sa.Column('population_in_phase', sa.Integer(), nullable=False), + sa.Column('population_fraction_in_phase', sa.Float(), nullable=False), sa.Column('reference_period_start', sa.DateTime(), nullable=False), sa.Column('reference_period_end', sa.DateTime(), server_default=sa.text('NULL'), nullable=True), sa.Column('source_data', sa.Text(), nullable=True), + sa.CheckConstraint('(reference_period_end >= reference_period_start) OR (reference_period_start IS NULL)', name='reference_period'), sa.ForeignKeyConstraint(['admin2_ref'], ['admin2.id'], onupdate='CASCADE'), - sa.ForeignKeyConstraint(['org_ref'], ['org.id'], onupdate='CASCADE'), + sa.ForeignKeyConstraint(['ipc_phase_code'], ['ipc_phase.code'], onupdate='CASCADE'), + sa.ForeignKeyConstraint(['ipc_type_code'], ['ipc_type.code'], onupdate='CASCADE'), sa.ForeignKeyConstraint(['resource_ref'], ['resource.id'], onupdate='CASCADE', ondelete='CASCADE'), - sa.ForeignKeyConstraint(['sector_code'], ['sector.code'], onupdate='CASCADE'), sa.PrimaryKeyConstraint('id') ) - op.create_index(op.f('ix_operational_presence_reference_period_start'), 'operational_presence', ['reference_period_start'], unique=False) - op.create_table('population', + op.create_index(op.f('ix_food_security_population_fraction_in_phase'), 'food_security', ['population_fraction_in_phase'], unique=False) + op.create_index(op.f('ix_food_security_population_in_phase'), 'food_security', ['population_in_phase'], unique=False) + op.create_index(op.f('ix_food_security_reference_period_end'), 'food_security', ['reference_period_end'], unique=False) + op.create_index(op.f('ix_food_security_reference_period_start'), 'food_security', ['reference_period_start'], unique=False) + op.create_table('humanitarian_needs', sa.Column('id', sa.Integer(), nullable=False), sa.Column('resource_ref', sa.Integer(), nullable=False), sa.Column('admin2_ref', sa.Integer(), nullable=False), + sa.Column('population_status_code', sa.String(length=32), nullable=True), + sa.Column('population_group_code', sa.String(length=32), nullable=True), + sa.Column('sector_code', sa.String(length=32), nullable=True), sa.Column('gender_code', sa.String(length=1), nullable=True), sa.Column('age_range_code', sa.String(length=32), nullable=True), + sa.Column('disabled_marker', sa.Boolean(), server_default=sa.text('NULL'), nullable=True), sa.Column('population', sa.Integer(), nullable=False), 
sa.Column('reference_period_start', sa.DateTime(), nullable=False), sa.Column('reference_period_end', sa.DateTime(), server_default=sa.text('NULL'), nullable=True), sa.Column('source_data', sa.Text(), nullable=True), + sa.CheckConstraint('(reference_period_end >= reference_period_start) OR (reference_period_start IS NULL)', name='reference_period'), + sa.CheckConstraint('population >= 0', name='population'), sa.ForeignKeyConstraint(['admin2_ref'], ['admin2.id'], onupdate='CASCADE'), sa.ForeignKeyConstraint(['age_range_code'], ['age_range.code'], onupdate='CASCADE'), sa.ForeignKeyConstraint(['gender_code'], ['gender.code'], onupdate='CASCADE'), + sa.ForeignKeyConstraint(['population_group_code'], ['population_group.code'], onupdate='CASCADE'), + sa.ForeignKeyConstraint(['population_status_code'], ['population_status.code'], onupdate='CASCADE'), sa.ForeignKeyConstraint(['resource_ref'], ['resource.id'], onupdate='CASCADE', ondelete='CASCADE'), + sa.ForeignKeyConstraint(['sector_code'], ['sector.code'], onupdate='CASCADE'), sa.PrimaryKeyConstraint('id') ) - op.create_index(op.f('ix_population_population'), 'population', ['population'], unique=False) - # ### end Alembic commands ### - - # ### commands auto generated by Alembic - please adjust! ### - op.create_table('ipc_phase', - sa.Column('code', sa.String(length=32), nullable=False), - sa.Column('name', sa.String(length=32), nullable=False), - sa.Column('description', sa.String(length=512), nullable=False), - sa.PrimaryKeyConstraint('code') - ) - op.create_table('ipc_type', - sa.Column('code', sa.String(length=32), nullable=False), - sa.Column('description', sa.String(length=512), nullable=False), - sa.PrimaryKeyConstraint('code') - ) - op.create_table('population_group', - sa.Column('code', sa.String(length=32), nullable=False), - sa.Column('description', sa.String(length=512), nullable=False), - sa.PrimaryKeyConstraint('code') - ) - op.create_index(op.f('ix_population_group_description'), 'population_group', ['description'], unique=False) - op.create_table('population_status', - sa.Column('code', sa.String(length=32), nullable=False), - sa.Column('description', sa.String(length=512), nullable=False), - sa.PrimaryKeyConstraint('code') - ) - op.create_index(op.f('ix_population_status_description'), 'population_status', ['description'], unique=False) + op.create_index(op.f('ix_humanitarian_needs_population'), 'humanitarian_needs', ['population'], unique=False) + op.create_index(op.f('ix_humanitarian_needs_reference_period_end'), 'humanitarian_needs', ['reference_period_end'], unique=False) + op.create_index(op.f('ix_humanitarian_needs_reference_period_start'), 'humanitarian_needs', ['reference_period_start'], unique=False) op.create_table('national_risk', sa.Column('id', sa.Integer(), nullable=False), sa.Column('resource_ref', sa.Integer(), nullable=False), @@ -191,96 +254,122 @@ def upgrade() -> None: sa.Column('coping_capacity_risk', sa.Float(), nullable=False), sa.Column('meta_missing_indicators_pct', sa.Float(), nullable=True), sa.Column('meta_avg_recentness_years', sa.Float(), nullable=True), - sa.Column('reference_period_start', sa.DateTime(), nullable=False), + sa.Column('reference_period_start', sa.DateTime(), server_default=sa.text('NULL'), nullable=True), sa.Column('reference_period_end', sa.DateTime(), server_default=sa.text('NULL'), nullable=True), sa.Column('source_data', sa.Text(), nullable=True), + sa.CheckConstraint('(reference_period_end >= reference_period_start) OR (reference_period_start IS NULL)', 
name='reference_period'), + sa.CheckConstraint('meta_avg_recentness_years >= 0.0', name='meta_avg_recentness_years'), sa.ForeignKeyConstraint(['admin2_ref'], ['admin2.id'], onupdate='CASCADE'), sa.ForeignKeyConstraint(['resource_ref'], ['resource.id'], onupdate='CASCADE', ondelete='CASCADE'), sa.PrimaryKeyConstraint('id') ) + op.create_index(op.f('ix_national_risk_reference_period_end'), 'national_risk', ['reference_period_end'], unique=False) op.create_index(op.f('ix_national_risk_reference_period_start'), 'national_risk', ['reference_period_start'], unique=False) - op.create_table('food_security', + op.create_table('operational_presence', sa.Column('id', sa.Integer(), nullable=False), sa.Column('resource_ref', sa.Integer(), nullable=False), sa.Column('admin2_ref', sa.Integer(), nullable=False), - sa.Column('ipc_phase_code', sa.String(length=32), nullable=False), - sa.Column('ipc_type_code', sa.String(length=32), nullable=False), - sa.Column('population_in_phase', sa.Integer(), nullable=False), - sa.Column('population_fraction_in_phase', sa.Float(), nullable=False), + sa.Column('org_ref', sa.Integer(), nullable=False), + sa.Column('sector_code', sa.String(length=32), nullable=False), sa.Column('reference_period_start', sa.DateTime(), nullable=False), sa.Column('reference_period_end', sa.DateTime(), server_default=sa.text('NULL'), nullable=True), sa.Column('source_data', sa.Text(), nullable=True), + sa.CheckConstraint('(reference_period_end >= reference_period_start) OR (reference_period_start IS NULL)', name='reference_period'), sa.ForeignKeyConstraint(['admin2_ref'], ['admin2.id'], onupdate='CASCADE'), - sa.ForeignKeyConstraint(['ipc_phase_code'], ['ipc_phase.code'], onupdate='CASCADE'), - sa.ForeignKeyConstraint(['ipc_type_code'], ['ipc_type.code'], onupdate='CASCADE'), + sa.ForeignKeyConstraint(['org_ref'], ['org.id'], onupdate='CASCADE'), sa.ForeignKeyConstraint(['resource_ref'], ['resource.id'], onupdate='CASCADE', ondelete='CASCADE'), + sa.ForeignKeyConstraint(['sector_code'], ['sector.code'], onupdate='CASCADE'), sa.PrimaryKeyConstraint('id') ) - op.create_table('humanitarian_needs', + op.create_index(op.f('ix_operational_presence_reference_period_end'), 'operational_presence', ['reference_period_end'], unique=False) + op.create_index(op.f('ix_operational_presence_reference_period_start'), 'operational_presence', ['reference_period_start'], unique=False) + op.create_table('population', sa.Column('id', sa.Integer(), nullable=False), sa.Column('resource_ref', sa.Integer(), nullable=False), sa.Column('admin2_ref', sa.Integer(), nullable=False), - sa.Column('population_status_code', sa.String(length=32), nullable=True), - sa.Column('population_group_code', sa.String(length=32), nullable=True), - sa.Column('sector_code', sa.String(length=32), nullable=True), sa.Column('gender_code', sa.String(length=1), nullable=True), sa.Column('age_range_code', sa.String(length=32), nullable=True), - sa.Column('disabled_marker', sa.Boolean(), server_default=sa.text('NULL'), nullable=True), sa.Column('population', sa.Integer(), nullable=False), sa.Column('reference_period_start', sa.DateTime(), nullable=False), sa.Column('reference_period_end', sa.DateTime(), server_default=sa.text('NULL'), nullable=True), sa.Column('source_data', sa.Text(), nullable=True), + sa.CheckConstraint('(reference_period_end >= reference_period_start) OR (reference_period_start IS NULL)', name='reference_period'), + sa.CheckConstraint('population >= 0', name='population'), sa.ForeignKeyConstraint(['admin2_ref'], ['admin2.id'], 
onupdate='CASCADE'), sa.ForeignKeyConstraint(['age_range_code'], ['age_range.code'], onupdate='CASCADE'), sa.ForeignKeyConstraint(['gender_code'], ['gender.code'], onupdate='CASCADE'), - sa.ForeignKeyConstraint(['population_group_code'], ['population_group.code'], onupdate='CASCADE'), - sa.ForeignKeyConstraint(['population_status_code'], ['population_status.code'], onupdate='CASCADE'), sa.ForeignKeyConstraint(['resource_ref'], ['resource.id'], onupdate='CASCADE', ondelete='CASCADE'), - sa.ForeignKeyConstraint(['sector_code'], ['sector.code'], onupdate='CASCADE'), sa.PrimaryKeyConstraint('id') ) - op.create_index(op.f('ix_humanitarian_needs_population'), 'humanitarian_needs', ['population'], unique=False) - op.create_index(op.f('ix_humanitarian_needs_reference_period_start'), 'humanitarian_needs', ['reference_period_start'], unique=False) + op.create_index(op.f('ix_population_population'), 'population', ['population'], unique=False) + op.create_index(op.f('ix_population_reference_period_end'), 'population', ['reference_period_end'], unique=False) + op.create_index(op.f('ix_population_reference_period_start'), 'population', ['reference_period_start'], unique=False) # ### end Alembic commands ### def downgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### + op.drop_index(op.f('ix_population_reference_period_start'), table_name='population') + op.drop_index(op.f('ix_population_reference_period_end'), table_name='population') op.drop_index(op.f('ix_population_population'), table_name='population') op.drop_table('population') op.drop_index(op.f('ix_operational_presence_reference_period_start'), table_name='operational_presence') + op.drop_index(op.f('ix_operational_presence_reference_period_end'), table_name='operational_presence') op.drop_table('operational_presence') + op.drop_index(op.f('ix_national_risk_reference_period_start'), table_name='national_risk') + op.drop_index(op.f('ix_national_risk_reference_period_end'), table_name='national_risk') + op.drop_table('national_risk') + op.drop_index(op.f('ix_humanitarian_needs_reference_period_start'), table_name='humanitarian_needs') + op.drop_index(op.f('ix_humanitarian_needs_reference_period_end'), table_name='humanitarian_needs') + op.drop_index(op.f('ix_humanitarian_needs_population'), table_name='humanitarian_needs') + op.drop_table('humanitarian_needs') + op.drop_index(op.f('ix_food_security_reference_period_start'), table_name='food_security') + op.drop_index(op.f('ix_food_security_reference_period_end'), table_name='food_security') + op.drop_index(op.f('ix_food_security_population_in_phase'), table_name='food_security') + op.drop_index(op.f('ix_food_security_population_fraction_in_phase'), table_name='food_security') + op.drop_table('food_security') + op.drop_index(op.f('ix_admin2_reference_period_start'), table_name='admin2') + op.drop_index(op.f('ix_admin2_reference_period_end'), table_name='admin2') + op.drop_index(op.f('ix_admin2_hapi_updated_date'), table_name='admin2') + op.drop_index(op.f('ix_admin2_hapi_replaced_date'), table_name='admin2') op.drop_table('admin2') op.drop_index(op.f('ix_resource_update_date'), table_name='resource') op.drop_index(op.f('ix_resource_is_hxl'), table_name='resource') + op.drop_index(op.f('ix_resource_hapi_updated_date'), table_name='resource') + op.drop_index(op.f('ix_resource_hapi_replaced_date'), table_name='resource') op.drop_table('resource') op.drop_index(op.f('ix_org_reference_period_start'), table_name='org') + op.drop_index(op.f('ix_org_reference_period_end'), 
table_name='org') + op.drop_index(op.f('ix_org_hapi_updated_date'), table_name='org') + op.drop_index(op.f('ix_org_hapi_replaced_date'), table_name='org') op.drop_index(op.f('ix_org_acronym'), table_name='org') op.drop_table('org') + op.drop_index(op.f('ix_admin1_reference_period_start'), table_name='admin1') + op.drop_index(op.f('ix_admin1_reference_period_end'), table_name='admin1') + op.drop_index(op.f('ix_admin1_hapi_updated_date'), table_name='admin1') + op.drop_index(op.f('ix_admin1_hapi_replaced_date'), table_name='admin1') op.drop_table('admin1') - op.drop_index(op.f('ix_sector_reference_period_start'), table_name='sector') op.drop_index(op.f('ix_sector_name'), table_name='sector') op.drop_table('sector') - op.drop_table('org_type') - op.drop_table('location') - op.drop_table('gender') - op.drop_index(op.f('ix_dataset_hdx_provider_name'), table_name='dataset') - op.drop_index(op.f('ix_dataset_hdx_provider_stub'), table_name='dataset') - op.drop_table('dataset') - op.drop_table('age_range') - # ### end Alembic commands ### - - # ### commands auto generated by Alembic - please adjust! ### - op.drop_index(op.f('ix_humanitarian_needs_reference_period_start'), table_name='humanitarian_needs') - op.drop_index(op.f('ix_humanitarian_needs_population'), table_name='humanitarian_needs') - op.drop_table('humanitarian_needs') - op.drop_table('food_security') - op.drop_index(op.f('ix_national_risk_reference_period_start'), table_name='national_risk') - op.drop_table('national_risk') op.drop_index(op.f('ix_population_status_description'), table_name='population_status') op.drop_table('population_status') op.drop_index(op.f('ix_population_group_description'), table_name='population_group') op.drop_table('population_group') + op.drop_index(op.f('ix_patch_state'), table_name='patch') + op.drop_index(op.f('ix_patch_patch_sequence_number'), table_name='patch') + op.drop_table('patch') + op.drop_index(op.f('ix_org_type_description'), table_name='org_type') + op.drop_table('org_type') + op.drop_index(op.f('ix_location_reference_period_start'), table_name='location') + op.drop_index(op.f('ix_location_reference_period_end'), table_name='location') + op.drop_index(op.f('ix_location_hapi_updated_date'), table_name='location') + op.drop_index(op.f('ix_location_hapi_replaced_date'), table_name='location') + op.drop_table('location') op.drop_table('ipc_type') op.drop_table('ipc_phase') + op.drop_table('gender') + op.drop_index(op.f('ix_dataset_hdx_provider_stub'), table_name='dataset') + op.drop_index(op.f('ix_dataset_hdx_provider_name'), table_name='dataset') + op.drop_table('dataset') + op.drop_table('age_range') # ### end Alembic commands ### diff --git a/alembic/versions/afd54d1a867e_insert_sample_data.py b/alembic/versions/afd54d1a867e_insert_sample_data.py deleted file mode 100644 index 0ea3ef78..00000000 --- a/alembic/versions/afd54d1a867e_insert_sample_data.py +++ /dev/null @@ -1,39 +0,0 @@ -"""insert sample data - -Revision ID: afd54d1a867e -Revises: 5ea413542a49 -Create Date: 2023-09-07 20:59:47.907634 - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision: str = 'afd54d1a867e' -down_revision: Union[str, None] = '5ea413542a49' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - with open('alembic/versions/afd54d1a867e_insert_sample_data.sql', 'r') as file: - sql_commands = file.read() - - # Execute SQL from file - op.execute(sql_commands) - - -def downgrade() -> None: - truncate_sql = '' - with open('alembic/versions/afd54d1a867e_insert_sample_data.sql', 'r') as file: - for line in file: - line = line.lower().strip() - if line.startswith('insert into'): - table_name = line.split(' ')[2] - truncate_sql += f'TRUNCATE TABLE {table_name} CASCADE; \n' - - print(f'The following sql code will be executed for downgrade:\n {truncate_sql}') - op.execute(truncate_sql) diff --git a/docs/contact.md b/docs/contact.md index 883e37bf..adb804e3 100644 --- a/docs/contact.md +++ b/docs/contact.md @@ -1,9 +1,9 @@ # Contact Us +--- + We appreciate your interest in our work. Here is how to contact us and get involved: -- [Bug reports and feature requests - TBD](fix/this/link) for HAPI are welcome. - If you have questions or comments about HAPI or the HDX platform, send an email to [hdx@un.org](mailto:hdx@un.org). - If you would like to be involved in periodic user research about HDX or related services, fill in [this form](https://docs.google.com/forms/d/e/1FAIpQLSdjN3mcDJ8BX-nu4F1veKEa8dYPlRvVcyahev8QjX7qHtha4g/viewform) and we will be in touch. -- For general comments or project ideas, send an email to [centrehumdata@un.org](mailto:centrehumdata@un.org) and we will respond as soon as we can. - [Sign up](bit.ly/humdatamailing) to receive the Centre’s newsletter for updates on our work. Visit bit.ly/humdatamailing \ No newline at end of file diff --git a/docs/data.md b/docs/data.md index 5343a41e..9af716a4 100644 --- a/docs/data.md +++ b/docs/data.md @@ -1,29 +1,33 @@ # Data Coverage -| | 3w | food_security | humanitarian_needs | national_risk | population | -|:----------------------------------:|:----------:|:-------------:|:------------------:|:-------------:|:----------:| -| Afghanistan | Yes (adm2) | No | Yes (adm2) | Yes (adm0) | Yes (adm1) | -| Burkina Faso | No | Yes (adm2) | No | Yes (adm0) | Yes (adm2) | -| Cameroon | No | Yes (adm2) | No | Yes (adm0) | Yes (adm1) | -| Central African Republic | No | Yes (adm2) | No | Yes (adm0) | No | -| Chad | No | Yes (adm2) | Yes (adm2) | Yes (adm0) | Yes (adm2) | -| Colombia | No | No | No | Yes (adm0) | Yes (adm2) | -| Democratic Republic of the Congo | No | No | No | Yes (adm0) | Yes (adm2) | -| El Salvador | No | No | No | Yes (adm0) | Yes (adm2) | -| Ethiopia | No | No | No | Yes (adm0) | Yes (adm2) | -| Guatemala | No | No | No | Yes (adm0) | Yes (adm2) | -| Haiti | No | No | No | Yes (adm0) | Yes (adm2) | -| Honduras | No | No | No | Yes (adm0) | Yes (adm2) | -| Mali | Yes (adm2) | Yes (adm2) | No | Yes (adm0) | Yes (adm2) | -| Mozambique | No | No | No | Yes (adm0) | Yes (adm2) | -| Myanmar | No | No | No | Yes (adm0) | Yes (adm2) | -| Niger | No | Yes (adm2) | No | Yes (adm0) | Yes (adm2) | -| Nigeria | Yes (adm2) | Yes (adm2) | No | Yes (adm0) | Yes (adm2) | -| Somalia | No | No | No | Yes (adm0) | Yes (adm2) | -| South Sudan | No | No | No | Yes (adm0) | Yes (adm2) | -| State of Palestine | No | No | No | Yes (adm0) | Yes (adm1) | -| Sudan | No | No | No | Yes (adm0) | Yes (adm1) | -| Syrian Arab Republic | No | No | No | Yes (adm0) | No | -| Ukraine | No | No | No | Yes (adm0) | Yes (adm1) | -| Venezuela 
(Bolivarian Republic of) | No | No | No | Yes (adm0) | Yes (adm2) | -| Yemen | No | No | Yes (adm2) | Yes (adm0) | No | \ No newline at end of file +--- + +The initial aim of HDX HAPI is to cover all countries that have a humanitarian response plan and to cover the data categories from the HDX data grids. + +| | Affected People - Humanitarian Needs | Affected People - Refugees | Coordination Context - Conflict event | Coordination Context - Funding | Coordination Context - National Risk | Coordination Context - Operational Presence | Food - Food Price | Food - Food Security | Population Social - Population | Population Social - Poverty-rate | +|:----------------------------------:|:----------------------------------:|:----------------------------------:|:-----------------------------------:|:----------------------------:|:----------------------------------:|:-----------------------------------------:|:---------------:|:------------------:|:----------------------------:|:------------------------------:| +| Afghanistan | Yes (adm2) | Yes (adm0) | Yes (adm2) | Yes (adm0) | Yes (adm0) | Yes (adm2) | Yes (adm2) | No | Yes (adm1) | Yes (adm1) | +| Burkina Faso | Yes (adm0) | Yes (adm0) | Yes (adm2) | Yes (adm0) | Yes (adm0) | No | Yes (adm2) | Yes (adm2) | Yes (adm2) | Yes (adm1) | +| Cameroon | Yes (adm1) | Yes (adm0) | Yes (adm2) | Yes (adm0) | Yes (adm0) | Yes (adm2) | Yes (adm2) | Yes (adm2) | Yes (adm1) | Yes (adm1) | +| Central African Republic | Yes (adm2) | Yes (adm0) | Yes (adm2) | Yes (adm0) | Yes (adm0) | No | Yes (adm2) | Yes (adm2) | No | Yes (adm1) | +| Chad | Yes (adm0) | Yes (adm0) | Yes (adm2) | Yes (adm0) | Yes (adm0) | Yes (adm1) | Yes (adm2) | Yes (adm2) | Yes (adm2) | Yes (adm1) | +| Colombia | Yes (adm1) | Yes (adm0) | Yes (adm2) | Yes (adm0) | Yes (adm0) | Yes (adm2) | Yes (adm2) | No | Yes (adm2) | Yes (adm1) | +| Democratic Republic of the Congo | Yes (adm0) | Yes (adm0) | Yes (adm2) | Yes (adm0) | Yes (adm0) | No | Yes (adm2) | No | Yes (adm2) | Yes (adm1) | +| El Salvador | Yes (adm0) | Yes (adm0) | Yes (adm0) | Yes (adm0) | Yes (adm0) | No | Yes (adm2) | No | Yes (adm2) | Yes (adm1) | +| Ethiopia | Yes (adm0) | Yes (adm0) | Yes (adm2) | Yes (adm0) | Yes (adm0) | Yes (adm0) | Yes (adm2) | No | Yes (adm2) | Yes (adm1) | +| Guatemala | Yes (adm2) | Yes (adm0) | Yes (adm0) | Yes (adm0) | Yes (adm0) | Yes (adm2) | Yes (adm2) | No | Yes (adm2) | Yes (adm1) | +| Haiti | Yes (adm2) | Yes (adm0) | Yes (adm2) | Yes (adm0) | Yes (adm0) | Yes (adm2) | Yes (adm2) | No | Yes (adm2) | Yes (adm1) | +| Honduras | Yes (adm2) | Yes (adm0) | Yes (adm0) | Yes (adm0) | Yes (adm0) | Yes (adm2) | Yes (adm2) | No | Yes (adm2) | Yes (adm1) | +| Mali | Yes (adm2) | Yes (adm0) | Yes (adm2) | Yes (adm0) | Yes (adm0) | Yes (adm2) | Yes (adm2) | Yes (adm2) | Yes (adm2) | Yes (adm1) | +| Mozambique | Yes (adm2) | Yes (adm0) | Yes (adm2) | Yes (adm0) | Yes (adm0) | Yes (adm2) | Yes (adm2) | No | Yes (adm2) | Yes (adm1) | +| Myanmar | Yes (adm1) | Yes (adm0) | Yes (adm2) | Yes (adm0) | Yes (adm0) | No | Yes (adm2) | No | Yes (adm2) | Yes (adm1) | +| Niger | Yes (adm2) | Yes (adm0) | Yes (adm2) | Yes (adm0) | Yes (adm0) | No | Yes (adm2) | Yes (adm2) | Yes (adm2) | Yes (adm1) | +| Nigeria | Yes (adm2) | Yes (adm0) | Yes (adm2) | Yes (adm0) | Yes (adm0) | Yes (adm2) | Yes (adm2) | Yes (adm2) | Yes (adm2) | Yes (adm1) | +| Somalia | Yes (adm2) | Yes (adm0) | Yes (adm2) | Yes (adm0) | Yes (adm0) | No | Yes (adm2) | No | Yes (adm2) | No | +| South Sudan | Yes (adm2) | Yes (adm0) | Yes (adm2) | Yes (adm0) | Yes
(adm0) | Yes (adm0) | Yes (adm2) | No | Yes (adm2) | No | +| State of Palestine | No | Yes (adm0) | Yes (adm2) | Yes (adm0) | Yes (adm0) | Yes (adm0) | Yes (adm2) | No | Yes (adm1) | Yes (adm1) | +| Sudan | Yes (adm2) | Yes (adm0) | Yes (adm2) | Yes (adm0) | Yes (adm0) | Yes (adm0) | Yes (adm2) | No | Yes (adm1) | Yes (adm1) | +| Syrian Arab Republic | Yes (adm0) | Yes (adm0) | Yes (adm2) | Yes (adm0) | Yes (adm0) | No | Yes (adm2) | No | No | No | +| Ukraine | Yes (adm0) | Yes (adm0) | Yes (adm2) | Yes (adm0) | Yes (adm0) | No | Yes (adm2) | No | Yes (adm1) | Yes (adm1) | +| Venezuela (Bolivarian Republic of) | Yes (adm2) | Yes (adm0) | Yes (adm2) | Yes (adm0) | Yes (adm0) | Yes (adm2) | No | No | Yes (adm2) | No | +| Yemen | Yes (adm2) | Yes (adm0) | Yes (adm2) | Yes (adm0) | Yes (adm0) | Yes (adm2) | Yes (adm2) | No | No | Yes (adm1) | diff --git a/docs/examples.md b/docs/examples.md index c99bfb59..286aaf14 100644 --- a/docs/examples.md +++ b/docs/examples.md @@ -1,12 +1,13 @@ # Code Examples +--- + +In each of these code examples you will need to insert your generated app identifier in place of the { your app identifier } text. To generate an app identifier, follow the getting started guide. + ## 1. Query a theme end point and loop through pages Themes are the core data of the API. The results are paginated and so multiple calls are needed to get the whole dataset. Below we query the 3W theme for Afghanistan and return all results into a single object. To query a different theme or country change the constant variable of ```THEME``` to another theme or ```LOCATION``` to a different ISO3 country code. -The current themes supported ```population```, ```3w```. - -The current countries supported ```AFG```, ```MLI```, ```NGA``` ### Python @@ -37,22 +38,21 @@ def fetch_data(base_url, limit=1000): print(f"Getting results {offset} to {offset+limit-1}") json_response = json.loads(response.read()) - results.extend(json_response) + results.extend(json_response['data']) # If the returned results are less than the limit, it's the last page - if len(json_response) < limit: + if len(json_response['data']) < limit: break idx += 1 return results -THEME = "3w" +THEME = "coordination-context/operational-presence" LOCATION = "AFG" -BASE_URL = f"https://placeholder.url/api/v1/themes/{THEME}?output_format=json&location_code={LOCATION}" +BASE_URL = f"https://stage.hapi-humdata-org.ahconu.org/api/v1/{THEME}?output_format=json&location_code={LOCATION}&app_identifier={ your app identifier }" LIMIT = 1000 - results = fetch_data(BASE_URL, LIMIT) print(results) ``` @@ -73,10 +73,10 @@ async function fetchData(baseUrl, limit = 1000) { const response = await fetch(url); const jsonResponse = await response.json(); - results = results.concat(jsonResponse); + results = results.concat(jsonResponse['data']); // If the returned results are less than the limit, it's the last page - if (jsonResponse.length < limit) { + if (jsonResponse['data'].length < limit) { break; } @@ -86,9 +86,9 @@ return results; } -const THEME = "3W" +const THEME = "coordination-context/operational-presence" const LOCATION = "AFG" -const BASE_URL = `https://placeholder.url/api/v1/themes/${THEME}?output_format=json&location_code=${LOCATION}`; +const BASE_URL = `https://stage.hapi-humdata-org.ahconu.org/api/v1/${THEME}?output_format=json&location_code=${LOCATION}&app_identifier={ your app identifier }`; const LIMIT = 1000; window.onload = async function() { @@ -115,10 +115,10 @@ async function
fetchData(baseUrl, limit = 1000) { const response = await fetch(url); const jsonResponse = await response.json(); - results = results.concat(jsonResponse); + results = results.concat(jsonResponse['data']); // If the returned results are less than the limit, it's the last page - if (jsonResponse.length < limit) { + if (jsonResponse['data'].length < limit) { break; } @@ -128,9 +128,9 @@ async function fetchData(baseUrl, limit = 1000) { return results; } -const THEME = "3W" +const THEME = "coordination-context/operational-presence" const LOCATION = "AFG" -const BASE_URL = `https://placeholder.url/api/v1/themes/${THEME}?output_format=json&location_code=${LOCATION}`; +const BASE_URL = `https://stage.hapi-humdata-org.ahconu.org/api/v1/${THEME}?output_format=json&location_code=${LOCATION}&app_identifier={ your app identifier }`; const LIMIT = 1000; fetchData(BASE_URL, LIMIT).then(results => { @@ -158,22 +158,22 @@ fetch_data <- function(base_url, limit = 1000) { json_response <- fromJSON(content(response, "text")) - results <- append(results, list(json_response)) + results <- append(results, list(json_response$data)) # If the returned results are less than the limit, it's the last page - if(nrow(json_response) < limit) { + if(length(json_response$data) < limit) { break } idx <- idx + 1 } - return(do.call(rbind, results)) + return(results) } -THEME <- "3w" +THEME <- "coordination-context/operational-presence" LOCATION <- "AFG" -BASE_URL <- paste0("https://placeholder.url/api/v1/themes/", THEME, "?output_format=json&location_code=", LOCATION) +BASE_URL <- paste0("https://stage.hapi-humdata-org.ahconu.org/api/v1/", THEME, "?output_format=json&location_code=", LOCATION, "&app_identifier={ your app identifier }") LIMIT <- 1000 results <- fetch_data(BASE_URL, LIMIT) @@ -184,7 +184,7 @@ print(results) ``` ## 2. Add extra filter/s It is possible to add extra filters to the call to get a subset of results. To see the full set of filters that can be used for each theme, please check this documentation: -https://placeholder.url/docs#/humanitarian-response/ +https://stage.hapi-humdata-org.ahconu.org/docs#/humanitarian-response/ ### Python @@ -194,14 +194,14 @@ Change the code to include a new parameter in the URL. ```python SECTOR= urllib.parse.quote("Emergency Shelter and NFI") -BASE_URL = f"https://placeholder.url/api/v1/themes/{THEME}?output_format=json&location_code={LOCATION}&sector_name={SECTOR}" +BASE_URL = f"https://stage.hapi-humdata-org.ahconu.org/api/v1/{THEME}?output_format=json&location_code={LOCATION}&sector_name={SECTOR}&app_identifier={ your app identifier }" ``` #### Filter by Admin1 ```python ADMIN1= "AF01" -BASE_URL = f"https://placeholder.url/api/v1/themes/{THEME}?output_format=json&location_code={LOCATION}&admin1_code={ADMIN1}" +BASE_URL = f"https://stage.hapi-humdata-org.ahconu.org/api/v1/{THEME}?output_format=json&location_code={LOCATION}&admin1_code={ADMIN1}&app_identifier={ your app identifier }" ``` ### Plain Javascript and Node @@ -212,14 +212,14 @@ Change the code to include a new parameter in the URL.
```javascript const SECTOR = "Emergency Shelter and NFI" -const BASE_URL = `https://placeholder.url/api/v1/themes/${THEME}?output_format=json&location_code=${LOCATION}&sector_name=${SECTOR}`; +const BASE_URL = `https://stage.hapi-humdata-org.ahconu.org/api/v1/${THEME}?output_format=json&location_code=${LOCATION}&sector_name=${SECTOR}&app_identifier={ your app identifier }`; ``` #### Filter by Admin1 ```javascript const ADMIN1 = "AF01" -const BASE_URL = `https://placeholder.url/api/v1/themes/${THEME}?output_format=json&location_code=${LOCATION}&admin1_code=${ADMIN1}`; +const BASE_URL = `https://stage.hapi-humdata-org.ahconu.org/api/v1/${THEME}?output_format=json&location_code=${LOCATION}&admin1_code=${ADMIN1}&app_identifier={ your app identifier }`; ``` ### R @@ -230,14 +230,14 @@ Change the code to include a new parameter in the URL. ```R SECTOR <- "Emergency Shelter and NFI" -BASE_URL <- paste0("https://placeholder.url/api/v1/themes/", THEME, "?output_format=json&location_code=", LOCATION, "&sector_name=",SECTOR) +BASE_URL <- paste0("https://stage.hapi-humdata-org.ahconu.org/api/v1/", THEME, "?output_format=json&location_code=", LOCATION, "&sector_name=",SECTOR,'&app_identifier={ your app identifier }') ``` #### Filter by Admin1 ```R ADMIN1 <- "AF01" -BASE_URL <- paste0("https://placeholder.url/api/v1/themes/", THEME, "?output_format=json&location_code=", LOCATION, "&admin1_code=",ADMIN1) +BASE_URL <- paste0("https://stage.hapi-humdata-org.ahconu.org/api/v1/", THEME, "?output_format=json&location_code=", LOCATION, "&admin1_code=",ADMIN1,'&app_identifier={ your app identifier }') ``` ## 3. Filter for admin level @@ -249,18 +249,18 @@ admin_level=1 ## 4. Get data from supporting tables -Each supporting table such as ```orgs```, ```orgs_type```, ```sector``` and more have a unique URL to call to get the range of possible values. Below we show the URL for getting of the sector names and codes. Change the code at the top to have a new ```BASEURL``` and remove the URL parameters above it. Full code examples can be seen in the example repo. +Each supporting table such as ```orgs```, ```orgs_type```, ```sector``` and more has a unique URL to call to get the range of possible values. Below we show the URL for getting the sector names and codes. Change the code at the top to have a new ```BASEURL``` and remove the URL parameters above it. ### Python ```python -BASE_URL "https://placeholder.url/api/v1/sector?output_format=json&offset=0&limit=1000" +BASE_URL = "https://stage.hapi-humdata-org.ahconu.org/api/v1/metadata/sector?output_format=json&offset=0&limit=1000&app_identifier={ your app identifier }" ``` ### Javascript ```javascript -CONST BASE_URL "https://placeholder.url/api/v1/sector?output_format=json&offset=0&limit=1000" +const BASE_URL = "https://stage.hapi-humdata-org.ahconu.org/api/v1/metadata/sector?output_format=json&offset=0&limit=1000&app_identifier={ your app identifier }" ``` ## 5. Get admin level data for a country https://apps.itos.uga.edu/codv2api/api/v1/themes/cod-ab/locations/AFG/versions/current/geoJSON/1 ``` [Full documentation](https://apps.itos.uga.edu/CODV2API/Help) + ## 6. Download as CSV The code examples so far have been using JSON output and then processing this data. To query this data as csv, change the ```output_format``` to ```csv``` as per the examples below. Visiting this URL through the browser will download the CSV to then be used on your computer.
```python -BASE_URL = "https://placeholder.url/api/v1/admin1?location_code=MLI&output_format=csv&offset=0&limit=1000" +BASE_URL = "https://stage.hapi-humdata-org.ahconu.org/api/v1/metadata/admin1?location_code=MLI&output_format=csv&offset=0&limit=1000&app_identifier={ your app identifier }" ``` ### Javascript ```javascript -CONST BASE_URL = "https://placeholder.url/api/v1/admin1?location_code=MLI&output_format=csv&offset=0&limit=1000" +const BASE_URL = "https://stage.hapi-humdata-org.ahconu.org/api/v1/metadata/admin1?location_code=MLI&output_format=csv&offset=0&limit=1000&app_identifier={ your app identifier }" ``` ## 7. Query Population and join to GeoJson from ITOS service @@ -318,15 +319,15 @@ def fetch_data(base_url, limit=1000): while True: offset = idx * limit url = f"{base_url}&offset={offset}&limit={limit}" - + print(url) with urllib.request.urlopen(url) as response: print(f"Getting results {offset} to {offset+limit-1}") json_response = json.loads(response.read()) - results.extend(json_response) + results.extend(json_response['data']) # If the returned results are less than the limit, it's the last page - if len(json_response) < limit: + if len(json_response['data']) < limit: break idx += 1 @@ -379,11 +380,11 @@ def save_geojson(geojson, filename): json.dump(geojson, file) print(f"GeoJSON saved to {filename}") -THEME = "population" +THEME = "population-social/population" LOCATION = "AFG" -AGE_RANGE_CODE = "80%2B" +AGE_RANGE_CODE = "0-4" GENDER = "f" -BASE_URL = f"https://placeholder.url/api/v1/themes/{THEME}?output_format=json&location_code={LOCATION}&age_range_code={AGE_RANGE_CODE}&gender={GENDER}&admin1_is_unspecified=false&admin2_is_unspecified=true" +BASE_URL = f"https://stage.hapi-humdata-org.ahconu.org/api/v1/{THEME}?output_format=json&location_code={LOCATION}&age_range_code={AGE_RANGE_CODE}&gender={GENDER}&admin_level=1&app_identifier={ your app identifier }" LIMIT = 1000 results = fetch_data(BASE_URL, LIMIT) @@ -412,9 +413,9 @@ fetch_data <- function(base_url, limit=1000) { stop_for_status(response) json_response <- content(response, "parsed", type = "application/json") - results <- c(results, json_response) + results <- c(results, json_response$data) - if (length(json_response) < limit) { + if (length(json_response$data) < limit) { break } idx <- idx + 1 @@ -449,11 +450,11 @@ save_geojson <- function(geojson, filename) { } # Use the functions -THEME <- "population" +THEME <- "population-social/population" LOCATION <- "AFG" -AGE_RANGE_CODE <- "80%2B" +AGE_RANGE_CODE <- "0-4" GENDER <- "f" -BASE_URL <- sprintf("https://placeholder.url/api/v1/themes/%s?output_format=json&location_code=%s&age_range_code=%s&gender=%s&admin1_is_unspecified=false&admin2_is_unspecified=true", +BASE_URL <- sprintf("https://stage.hapi-humdata-org.ahconu.org/api/v1/%s?output_format=json&location_code=%s&age_range_code=%s&gender=%s&admin_level=1&app_identifier={ your app identifier }", THEME, LOCATION, AGE_RANGE_CODE, GENDER) LIMIT <- 1000 results <- fetch_data(BASE_URL, LIMIT) @@ -465,7 +466,7 @@ print(results) ``` -## 8. Load data intoa google spreadsheet using app script and periodically update +## 8.
Load data into a Google spreadsheet using app script and periodically update ### App script A simple script that will fetch the API data and place it in the spreadsheet ```javascript function loadApiData() { - var baseUrl = "https://placeholder.url/api/v1/themes/3w?output_format=json"; + var baseUrl = "https://stage.hapi-humdata-org.ahconu.org/api/v1/coordination-context/operational-presence?output_format=json&location_code=AFG&app_identifier={ your app identifier }"; var limit = 10000; var offset = 0; @@ -496,8 +497,8 @@ var jsonData = JSON.parse(response.getContentText()); // If there's no data or less data than the limit, break out of the loop - if (!jsonData.length || jsonData.length < limit) { - allData = allData.concat(jsonData); + if (!jsonData.data.length || jsonData.data.length < limit) { + allData = allData.concat(jsonData.data); break; } @@ -545,4 +546,13 @@ When you run the script for the first time or set up a trigger, it will ask for permissions. Make sure to grant them so the script can access the external API and modify your Google Spreadsheet. -Now, the script will run daily at the time you specified and load the API data into your Google Spreadsheet. \ No newline at end of file +Now, the script will run daily at the time you specified and load the API data into your Google Spreadsheet. + +## 9. Getting metadata for a resource + +In this code example we take the resource ID of a dataset and then query the metadata endpoint to get the contributing organisation for the dataset. + + +``` +https://stage.hapi-humdata-org.ahconu.org/api/v1/metadata/resource?hdx_id=562e7757-0683-4d61-87bd-a7c94af2ee38&update_date_min=2020-01-01&update_date_max=2024-12-31&output_format=json&offset=0&limit=1000 +``` \ No newline at end of file diff --git a/docs/geo.md b/docs/geo.md index e69de29b..8bad03fb 100644 --- a/docs/geo.md +++ b/docs/geo.md @@ -0,0 +1,38 @@ +# Geo Data + +--- + +Much of the data in HAPI references a geographical area. The complementary geodata is provided by ITOS via an ArcGIS service accessible here: [https://apps.itos.uga.edu/CODV2API/api/v1/](https://apps.itos.uga.edu/CODV2API/api/v1/) + + +This service contains the common operational dataset (COD) administrative boundaries, and it can be accessed in a number of formats. Enhanced datasets have been standardised and contain consistent data formatting. Check the [CODs Dashboard](https://cod.unocha.org/) to see the status of different countries. + + +Below are examples of how to access the shapefile and GeoJSON for the supported countries. + + +## Geojson + + +URL (replace iso3 and admlevel with appropriate values) +``` +https://apps.itos.uga.edu/CODV2API/api/v1/themes/cod-ab/locations/{ iso3 }/versions/current/geoJSON/{ admlevel } +``` + +E.g. to get the boundaries for Afghanistan admin level 1 in GeoJSON, use the URL: + + +[https://apps.itos.uga.edu/CODV2API/api/v1/themes/cod-ab/locations/AFG/versions/current/geoJSON/1](https://apps.itos.uga.edu/CODV2API/api/v1/themes/cod-ab/locations/AFG/versions/current/geoJSON/1) + + +## SHP file + + +URL (replace iso3 and admlevel with appropriate values) +``` +https://apps.itos.uga.edu/CODV2API/api/v1/themes/cod-ab/locations/{ iso3 }/versions/current/SHP/{ admlevel } +``` + +E.g.
to get the boundaries for Afghanistan admin level 1 as a SHP file, use the URL:
+
+[https://apps.itos.uga.edu/CODV2API/api/v1/themes/cod-ab/locations/AFG/versions/current/SHP/1](https://apps.itos.uga.edu/CODV2API/api/v1/themes/cod-ab/locations/AFG/versions/current/SHP/1)
diff --git a/docs/getting-started.md b/docs/getting-started.md
index 6d9bb87d..ef458460 100644
--- a/docs/getting-started.md
+++ b/docs/getting-started.md
@@ -1,143 +1,200 @@
 # Getting Started with the API

-In addtion to this getting started section we suggest exploring the [query interface](https://placeholder.url/docs) which details all available filtering options for each endpoint, providing a hands-on way to familiarize yourself with the API's structure.
+---
+
+Here you will find simple instructions to help you get started with using HDX HAPI. In addition to this getting started section we suggest exploring the [query interface](https://stage.hapi-humdata-org.ahconu.org/docs) which details all available filtering options for each endpoint, providing a hands-on way to familiarise yourself with the API's structure.
+
 Below, you will find example URLs to help you learn how to construct your API queries. These URLs can be entered directly into your web browser for immediate results.

-## Accessing 3W Data
+## Generating a key
+
+To access HDX HAPI you need to generate an app identifier. This can be done via the [sandbox interface encode_identifier endpoint](https://stage.hapi-humdata-org.ahconu.org/docs#/Utility/get_encoded_identifier_api_v1_encode_identifier_get). Enter your application name and email address and it will return the app identifier. The key must be included as a query string parameter e.g.
+
+
+```
+https://stage.hapi-humdata-org.ahconu.org/api/v1/coordination-context/operational-presence?app_identifier={your app identifier}
+```
+
+
+
+
+## Accessing Operational Presence (3W) Data
+
+
+Retrieve the latest Operational Presence (Who's doing What, Where) data for a specific country using the `location_code` filter and the country’s ISO3 code. The following example demonstrates how to get data for Mali:

-Retrieve the latest 3W (Who's doing What, Where) data for a specific country using the `location_name` filter. The following example demonstrates how to get data for Mali:
 Copy this link into your browser to see the results
+
```plaintext
-https://placeholder.url/api/v1/themes/3w?location_name=Mali&output_format=json&offset=0&limit=10000
+https://stage.hapi-humdata-org.ahconu.org/api/v1/coordination-context/operational-presence?location_code=mli&output_format=json&offset=0&limit=1000&app_identifier={your app identifier}
+
+
```
+
 A single row of the result looks like this:
+
```JSON
[
-    {
-        "sector_code":"NUT",
-        "dataset_hdx_stub":"mali-operational-presence",
-        "resource_hdx_id":"a92fd2e8-4cbc-4366-92a8-1ffbbd6659d1",
-        "org_acronym":"ACF-E",
-        "org_name":"Action Contre la Faim-Espagne",
-        "sector_name":"Nutrition",
-        "location_code":"MLI",
-        "location_name":"Mali",
-        "admin1_code":"ML07",
-        "admin1_name":"Gao",
-        "admin2_code":"ML0702",
-        "admin2_name":"Bourem"
-    }
+    {
+        "sector_code":"NUT",
+        "dataset_hdx_stub":"mali-operational-presence",
+        "resource_hdx_id":"a92fd2e8-4cbc-4366-92a8-1ffbbd6659d1",
+        "org_acronym":"ACF-E",
+        "org_name":"Action Contre la Faim-Espagne",
+        "sector_name":"Nutrition",
+        "location_code":"MLI",
+        "location_name":"Mali",
+        "admin1_code":"ML07",
+        "admin1_name":"Gao",
+        "admin2_code":"ML0702",
+        "admin2_name":"Bourem"
+    }
]
```
+
The above result was in JSON. 
It is also possible to get a CSV by changing the output format as follows ```output_format=csv```.
+
The maximum number of rows returned in a single response is 10,000. To access more data beyond this limit, you can paginate through results by increasing the ```offset``` parameter as shown here:
+
```plaintext
-https://placeholder.url/api/v1/themes/3w?location_name=Mali&output_format=json&offset=10000&limit=10000
+https://stage.hapi-humdata-org.ahconu.org/api/v1/coordination-context/operational-presence?location_code=MLI&output_format=json&offset=1000&limit=1000&app_identifier={your app identifier}
```
+
Check the code example section to see code for querying multiple pages and loading into a single result.

-With the 3w theme endpoint there are a variety of filters to target your results include ```sector_name```, ```admin2_code``` and ```org_name```
+
+With the operational presence theme endpoint there are a variety of filters to target your results, including ```sector_name```, ```admin2_code``` and ```org_name```
+
This query gets all of the WASH activities happening in Yobe, Nigeria using the ```sector_name``` and ```admin1_name``` filter
+
```plaintext
-https://placeholder.url/api/v1/themes/3w?sector_name=Water%20Sanitation%20Hygiene&location_name=Nigeria&admin1_name=Yobe&output_format=json&offset=0&limit=1000
+https://stage.hapi-humdata-org.ahconu.org/api/v1/coordination-context/operational-presence?sector_name=Water%20Sanitation%20Hygiene&location_name=Nigeria&admin1_name=Yobe&output_format=json&offset=0&limit=1000&app_identifier={your app identifier}
```

-Remember to check the [technical documentation](https://placeholder.url/docs) for the full list of filters available
+
+Remember to check the [sandbox](https://stage.hapi-humdata-org.ahconu.org/docs) for the full list of filters available.
+
## Exploring Population Data
+
The Population endpoint delivers detailed demographic breakdowns by age range and gender. The example query below uses ```location_code``` rather than ```location_name``` to use the iso3 code for Afghanistan ```AFG```. In addition it also uses the ```admin_level=1``` filter to get only admin level 1 results.
+
```
-https://placeholder.url/api/v1/themes/population?location_code=AFG&output_format=json&offset=0&limit=10000&admin_level=1
+https://stage.hapi-humdata-org.ahconu.org/api/v1/population-social/population?location_code=AFG&output_format=json&offset=0&limit=1000&admin_level=1&app_identifier={your app identifier}
```
+
To refine this query to retrieve population statistics specifically for males under the age of 5, append the age_range_code and gender_code filters to your request:
+
```
-https://placeholder.url/api/v1/themes/population?location_code=AFG&output_format=json&offset=0&limit=10000&admin_level=1&age_range_code=0-4&gender_code=m
+https://stage.hapi-humdata-org.ahconu.org/api/v1/population-social/population?location_code=AFG&output_format=json&offset=0&limit=10000&admin_level=1&age_range_code=0-4&gender_code=m&app_identifier={your app identifier}
```
+
By tailoring these filters, you can obtain a variety of demographic insights from the API
+
## Understanding Supporting Tables

-Each theme within our API is bolstered by associated supporting tables. These tables are essential for understanding the range of possible values you can work with in the theme-specific tables. For example, if you're filtering by age range—as we did with `age_range_code=0-4`—you'll want to know what age range codes are available. 
-You can retrieve a list of possible age ranges by querying the `age_range` support table like so:
+Each theme within our API is bolstered by associated supporting tables. These tables are essential for understanding the range of possible values you can work with in the theme-specific tables. For example, if you're filtering by sector, such as `sector=Nutrition`, you'll want to know what sectors are available.
+
+
+You can retrieve a list of possible sectors by querying the `sector` support table like so:
+
```plaintext
-https://placeholder.url/api/v1/age_range?output_format=json&offset=0&limit=1000
+https://stage.hapi-humdata-org.ahconu.org/api/v1/metadata/sector?output_format=json&offset=0&limit=1000&app_identifier={your app identifier}
```

-This functionality is not limited to age ranges. There are similar support tables for a variety of filters such as organizations, genders, sectors, and more. Querying these support tables provides you with the necessary information to apply precise filters and extract the data that's most relevant to your needs.
+
+This functionality is not limited to sectors. There are similar support tables for a variety of filters such as organisations, genders, age ranges, and more. Querying these support tables provides you with the necessary information to apply precise filters and extract the data that's most relevant to your needs.
+
## Getting Metadata through API Queries
+
When you inspect the JSON output from an initial API query, you'll encounter a variety of detailed fields. Take a look at this sample output:
+
```json
[
-    {
-        "sector_code": "NUT",
-        "dataset_hdx_stub": "mali-operational-presence",
-        "resource_hdx_id": "a92fd2e8-4cbc-4366-92a8-1ffbbd6659d1",
-        "org_acronym": "ACF-E",
-        "org_name": "Action Contre la Faim-Espagne",
-        "sector_name": "Nutrition",
-        "location_code": "MLI",
-        "location_name": "Mali",
-        "admin1_code": "ML07",
-        "admin1_name": "Gao",
-        "admin2_code": "ML0702",
-        "admin2_name": "Bourem"
-    }
+    {
+        "sector_code": "NUT",
+        "dataset_hdx_stub": "mali-operational-presence",
+        "resource_hdx_id": "a92fd2e8-4cbc-4366-92a8-1ffbbd6659d1",
+        "org_acronym": "ACF-E",
+        "org_name": "Action Contre la Faim-Espagne",
+        "sector_name": "Nutrition",
+        "location_code": "MLI",
+        "location_name": "Mali",
+        "admin1_code": "ML07",
+        "admin1_name": "Gao",
+        "admin2_code": "ML0702",
+        "admin2_name": "Bourem"
+    }
]
```

-Among these fields, ```dataset_hdx_stub``` and ```resource_hdx_id``` are keys to unlocking metadata about the dataset. This metadata includes the last update date, the organization responsible for the data, and links to download the original dataset.
+
+Among these fields, ```dataset_hdx_stub``` and ```resource_hdx_id``` are keys to unlocking metadata about the dataset. This metadata includes the last update date, the organisation responsible for the data, and links to download the original dataset. 
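+As a quick illustration, here is a minimal Python sketch (assuming the stage server and a valid app identifier, both of which you should substitute for your own setup) that pulls `resource_hdx_id` out of the first result row and builds the corresponding metadata query described below:
+
+```python
+import json
+import urllib.request
+
+APP_IDENTIFIER = "{your app identifier}"  # placeholder: replace with your own identifier
+BASE = "https://stage.hapi-humdata-org.ahconu.org/api/v1"
+
+# Fetch a single operational presence row for Mali
+url = (
+    f"{BASE}/coordination-context/operational-presence"
+    f"?location_code=MLI&output_format=json&offset=0&limit=1&app_identifier={APP_IDENTIFIER}"
+)
+with urllib.request.urlopen(url) as response:
+    row = json.loads(response.read())["data"][0]  # responses wrap results in "data"
+
+# The row's resource_hdx_id feeds the metadata/resource endpoint
+metadata_url = (
+    f"{BASE}/metadata/resource?hdx_id={row['resource_hdx_id']}"
+    f"&output_format=json&app_identifier={APP_IDENTIFIER}"
+)
+print(metadata_url)
+```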
+
To dive deeper into the data's origin, use the resource_hdx_id in the resource endpoint URL:
+
```
-https://placeholder.url/api/v1/resource?hdx_id=a92fd2e8-4cbc-4366-92a8-1ffbbd6659d1&update_date_min=2020-01-01&update_date_max=2024-12-31&output_format=json&offset=0&limit=1000
+https://stage.hapi-humdata-org.ahconu.org/api/v1/metadata/resource?hdx_id=b28928be-1847-408f-b3cd-9b87b596c710&update_date_min=2020-01-01&update_date_max=2024-12-31&output_format=json&limit=100&offset=0&app_identifier={your app identifier}
```
+
Executing this query provides a response like the following:
+
```JSON
-[
-    {
-        "hdx_id": "a92fd2e8-4cbc-4366-92a8-1ffbbd6659d1",
-        "name": "MALI_3W_June_2023",
-        "format": "XLSX",
-        "update_date": "2023-09-28T10:45:27",
-        "is_hxl": false,
-        "download_url": "https://data.humdata.org/dataset/d7ab89e4-bcb2-4127-be3c-5e8cf804ffd3/resource/a92fd2e8-4cbc-4366-92a8-1ffbbd6659d1/download/mali-3w_maj-2.xlsx",
-        "dataset_hdx_id": "d7ab89e4-bcb2-4127-be3c-5e8cf804ffd3",
-        "dataset_hdx_stub": "mali-operational-presence",
-        "dataset_title": "Mali: Operational Presence",
-        "dataset_hdx_provider_stub": "ocha-mali",
-        "dataset_hdx_provider_name": "OCHA Mali",
-        "hdx_link": "https://data.humdata.org/dataset/d7ab89e4-bcb2-4127-be3c-5e8cf804ffd3/resource/a92fd2e8-4cbc-4366-92a8-1ffbbd6659d1/",
-        "hdx_api_link": "https://data.humdata.org/api/action/resource_show?id=a92fd2e8-4cbc-4366-92a8-1ffbbd6659d1",
-        "dataset_hdx_link": "https://data.humdata.org/dataset/d7ab89e4-bcb2-4127-be3c-5e8cf804ffd3/",
-        "dataset_hdx_api_link": "https://data.humdata.org/api/action/package_show?id=d7ab89e4-bcb2-4127-be3c-5e8cf804ffd3"
-    }
-]
+{
+  "data": [
+    {
+      "hdx_id": "b28928be-1847-408f-b3cd-9b87b596c710",
+      "dataset_hdx_id": "d7ab89e4-bcb2-4127-be3c-5e8cf804ffd3",
+      "name": "MALI_3W_December_2023",
+      "format": "XLSX",
+      "update_date": "2024-03-01T12:33:46",
+      "is_hxl": true,
+      "download_url": "https://data.humdata.org/dataset/d7ab89e4-bcb2-4127-be3c-5e8cf804ffd3/resource/b28928be-1847-408f-b3cd-9b87b596c710/download/mali-3w-presence-operationnelle-december-2023.xlsx",
+      "hapi_updated_date": "2024-05-30T19:30:19.932113",
+      "dataset_hdx_stub": "mali-operational-presence",
+      "dataset_title": "Mali: Operational Presence",
+      "dataset_hdx_provider_stub": "ocha-mali",
+      "dataset_hdx_provider_name": "OCHA Mali",
+      "hdx_link": "https://data.humdata.org/dataset/d7ab89e4-bcb2-4127-be3c-5e8cf804ffd3/resource/b28928be-1847-408f-b3cd-9b87b596c710",
+      "hdx_api_link": "https://data.humdata.org/api/action/resource_show?id=b28928be-1847-408f-b3cd-9b87b596c710",
+      "dataset_hdx_link": "https://data.humdata.org/dataset/d7ab89e4-bcb2-4127-be3c-5e8cf804ffd3",
+      "dataset_hdx_api_link": "https://data.humdata.org/api/action/package_show?id=d7ab89e4-bcb2-4127-be3c-5e8cf804ffd3",
+      "provider_hdx_link": "https://data.humdata.org/organization/ocha-mali",
+      "provider_hdx_api_link": "https://data.humdata.org/api/action/organization_show?id=ocha-mali"
+    }
+  ]
+}
```

-This output gives you a comprehensive view of the dataset's metadata, including the update date, the contributing organization, and direct links to more information via the CKAN API and the original data file download.

-As a starting point to effectively use our API, we encourage you to experiment with different queries using the technical documentation's query interface and review the provided code examples for guidance. 
+This output gives you a comprehensive view of the dataset's metadata, including the update date, the contributing organisation, and direct links to more information via the CKAN API and the original data file download. + + +As a starting point to effectively use our API, we encourage you to experiment with different queries using the [sandbox's](https://stage.hapi-humdata-org.ahconu.org/docs) query interface and review the provided code examples for guidance. + diff --git a/docs/index.md b/docs/index.md index bfd588b9..0e0f660a 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,61 +1,52 @@ -# Summary - -For full documentation visit [the reference documentation](https://placeholder.url). Please note, in late 2023 and early 2024, HAPI is undergoing continual development. **Both the capabilities of the API and the data within it may change frequently.** - -HAPI is a service of the [Humanitarian Data Exchange (HDX)](https://data.humdata.org), part of UNOCHA's [Centre for Humanitarian Data](https://centre.humdata.org). The purpose of HAPI is to improve access to key humanitarian datasets taken from the HDX catalog data to better support automated visualisation and analysis. HAPI is primarily intended for application developers and data scientists working within the humanitarian community. - -HAPI provides a consistent, standardised and machine-readable interface to query and retrieve data from a set of high-value humanitarian indicators drawn from the HDX catalogue. With HAPI, the HDX team aims to provide a single point of access to critical humanitarian data in a standardised and structured way. - -As of November 2023, HAPI is in active development and early release. The number of indcators in HAPI is limited, and work is ongoing to continually add more data. The initial scope of HAPI will be the data included in the [HDX Data Grids](https://data.humdata.org/dashboards/overview-of-data-grids). 
-
-# Data Coverage
-
-|                                    |     3w     | food_security | humanitarian_needs | national_risk | population |
-|:----------------------------------:|:----------:|:-------------:|:------------------:|:-------------:|:----------:|
-| Afghanistan                        | Yes (adm2) | No            | Yes (adm2)         | Yes (adm0)    | Yes (adm1) |
-| Burkina Faso                       | No         | Yes (adm2)    | No                 | Yes (adm0)    | Yes (adm2) |
-| Cameroon                           | No         | Yes (adm2)    | No                 | Yes (adm0)    | Yes (adm1) |
-| Central African Republic           | No         | Yes (adm2)    | No                 | Yes (adm0)    | No         |
-| Chad                               | No         | Yes (adm2)    | Yes (adm2)         | Yes (adm0)    | Yes (adm2) |
-| Colombia                           | No         | No            | No                 | Yes (adm0)    | Yes (adm2) |
-| Democratic Republic of the Congo   | No         | No            | No                 | Yes (adm0)    | Yes (adm2) |
-| El Salvador                        | No         | No            | No                 | Yes (adm0)    | Yes (adm2) |
-| Ethiopia                           | No         | No            | No                 | Yes (adm0)    | Yes (adm2) |
-| Guatemala                          | No         | No            | No                 | Yes (adm0)    | Yes (adm2) |
-| Haiti                              | No         | No            | No                 | Yes (adm0)    | Yes (adm2) |
-| Honduras                           | No         | No            | No                 | Yes (adm0)    | Yes (adm2) |
-| Mali                               | Yes (adm2) | Yes (adm2)    | No                 | Yes (adm0)    | Yes (adm2) |
-| Mozambique                         | No         | No            | No                 | Yes (adm0)    | Yes (adm2) |
-| Myanmar                            | No         | No            | No                 | Yes (adm0)    | Yes (adm2) |
-| Niger                              | No         | Yes (adm2)    | No                 | Yes (adm0)    | Yes (adm2) |
-| Nigeria                            | Yes (adm2) | Yes (adm2)    | No                 | Yes (adm0)    | Yes (adm2) |
-| Somalia                            | No         | No            | No                 | Yes (adm0)    | Yes (adm2) |
-| South Sudan                        | No         | No            | No                 | Yes (adm0)    | Yes (adm2) |
-| State of Palestine                 | No         | No            | No                 | Yes (adm0)    | Yes (adm1) |
-| Sudan                              | No         | No            | No                 | Yes (adm0)    | Yes (adm1) |
-| Syrian Arab Republic               | No         | No            | No                 | Yes (adm0)    | No         |
-| Ukraine                            | No         | No            | No                 | Yes (adm0)    | Yes (adm1) |
-| Venezuela (Bolivarian Republic of) | No         | No            | No                 | Yes (adm0)    | Yes (adm2) |
-| Yemen                              | No         | No            | Yes (adm2)         | Yes (adm0)    | No         |
+# Overview

-# Terms Of Use
+---
+
+
+The [HDX Humanitarian API](https://dev.data-humdata-org.ahconu.org/hapi) (HDX HAPI) is a way to access standardised indicators from multiple sources to automate workflows and visualisations.
+
+
+HDX HAPI is in beta phase, and we are seeking feedback. To share your thoughts or join our Slack channel, send an email to [hdx@un.org](mailto:hdx@un.org).
+
+
+The initial scope of HDX HAPI will be the data included in the [HDX Data Grids](https://data.humdata.org/dashboards/overview-of-data-grids). Work is ongoing to add more data.
+
+# App Identifier
+To access HDX HAPI you need to generate an app identifier. This can be done via the [sandbox interface encode_identifier endpoint](https://stage.hapi-humdata-org.ahconu.org/docs#/Utility/get_encoded_identifier_api_v1_encode_identifier_get). Enter your application name and email address and it will return the app identifier. The key must be included as a query string parameter e.g.
+
+```
+https://stage.hapi-humdata-org.ahconu.org/api/v1/coordination-context/operational-presence?app_identifier={your app identifier}
+```
+
+# The Structure of HDX HAPI
+
+## Data Subcategory Endpoints
+HDX HAPI is organised around a set of key humanitarian data subcategories like **Baseline Population** and **Conflict Events**. Each of these subcategories can be queried via its endpoint. 
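+For example, a subcategory endpoint can be queried in a few lines of Python. A minimal sketch, assuming the stage server used elsewhere in these docs and a valid app identifier (both placeholders you should swap for your own values):
+
+```python
+import json
+import urllib.request
+
+APP_IDENTIFIER = "{your app identifier}"  # placeholder: replace with your own identifier
+url = (
+    "https://stage.hapi-humdata-org.ahconu.org/api/v1/population-social/population"
+    "?location_code=AFG&admin_level=1&output_format=json&offset=0&limit=1000"
+    f"&app_identifier={APP_IDENTIFIER}"
+)
+with urllib.request.urlopen(url) as response:
+    rows = json.loads(response.read())["data"]  # responses wrap results in "data"
+print(f"Retrieved {len(rows)} population rows")
+```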
+ +### Current list of data subcategory endpoints in HAPI + +#### Affected People + +- [Humanitarian Needs](https://stage.hapi-humdata-org.ahconu.org/docs#/Affected%20people/get_humanitarian_needs_api_v1_affected_people_humanitarian_needs_get) +- [Refugees](https://stage.hapi-humdata-org.ahconu.org/docs#/Affected%20people/get_refugees_api_v1_affected_people_refugees_get) + +#### Coordination and Context +- [Conflict Events](https://stage.hapi-humdata-org.ahconu.org/docs#/Conflict%20Events/get_conflict_events_api_v1_coordination_context_conflict_event_get) +- [Funding](https://stage.hapi-humdata-org.ahconu.org/docs#/Funding/get_fundings_api_v1_coordination_context_funding_get) +- [National risk](https://stage.hapi-humdata-org.ahconu.org/docs#/National%20Risk/get_national_risks_api_v1_coordination_context_national_risk_get) +- [Operational Presence (3W)](https://stage.hapi-humdata-org.ahconu.org/docs#/3W%20Operational%20Presence/get_operational_presences_api_v1_coordination_context_operational_presence_get) + +#### Food +- [Food Prices](https://stage.hapi-humdata-org.ahconu.org/docs#/Food%20Security%20%26%20Nutrition/get_food_prices_api_v1_food_food_price_get) +- [Food Security](https://stage.hapi-humdata-org.ahconu.org/docs#/Food%20Security%20%26%20Nutrition/get_food_security_api_v1_food_food_security_get) + +#### Population Social +- [Population](https://stage.hapi-humdata-org.ahconu.org/docs#/Baseline%20Population/get_populations_api_v1_population_social_population_get) +- [Poverty Rate](https://stage.hapi-humdata-org.ahconu.org/docs#/Baseline%20Population/get_poverty_rates_api_v1_population_social_poverty_rate_get) + +# Terms Of Use [The HDX Terms of Service](https://data.humdata.org/faqs/terms) -# The Structure of HAPI -## Indicator Endpoints -HAPI is organized around a set of key humanitarian indicators like **Baseline Population** and **3W - Operational Presence**. Each of these indicators can be queried via its endpoint. - -### Current list of indicator endpoints in HAPI -- [population](https://placeholder.url/docs#/population): Get data about baseline populations of a location -- [3w](https://placeholder.url/docs#/3W): Get data about operational presence. You can learn more about 3w data [here](https://3w.unocha.org/) - -## Supporting Tables -Additional supporting endpoints provide information about locations, codelists, and metadata. -### Current list of supporting endpoints in HAPI -- [admin-level](https://placeholder.url/docs#/admin-level): Get the lists of locations (countries and similar), and administrative subdivisions used as location references in HAPI. These are taken from the [Common Operational Datasets](https://data.humdata.org/dashboards/cod) -- [humanitarian-response](https://placeholder.url/docs#/humanitarian-response): Get the lists of organizations, organization types, and humanitarian sectors used in the data available in HAPI. -- [demographic](https://placeholder.url/docs#/demographic): Get the lists of gender categories and age groupings used in the data available in HAPI. -- [hdx-metadata](https://placeholder.url/docs#/hdx-metadata): Retrieve metadata about the source of any data available in HAPI. -## Dates -As of version 1 (released in late 2023), the data in HAPI is static and intended only for testing purposes. However you can filter your HAPI queries based on the date the source data was updated in HDX. Future versions will offer more robust date-related features. 
\ No newline at end of file
+## FAQs
+Please [refer to the landing page](https://dev.data-humdata-org.ahconu.org/hapi) for non-technical FAQs.
\ No newline at end of file
diff --git a/docs/subcategory_details.md b/docs/subcategory_details.md
new file mode 100644
index 00000000..c0b15109
--- /dev/null
+++ b/docs/subcategory_details.md
@@ -0,0 +1,260 @@
+# Data subcategory details and usage notes
+
+---
+
+This page contains detailed information about the data in HDX HAPI, including usage notes on each subcategory of data.
+
+## Category: Affected People
+
+### Sub-category: Humanitarian Needs
+
+This data represents the shared understanding of OCHA Humanitarian Country Teams of people's widespread emergency needs during crises. It includes the estimated number of people who need assistance, often referred to as People in Need (PIN). This PIN data is derived from the [Joint Intersectoral Analysis Framework (JIAF)](https://www.jiaf.info/), which consolidates and analyses evidence to inform and direct strategic response planning.
+
+The JIAF data is available for all Humanitarian Response Plan (HRP) countries on HDX. However, as the raw data comes from multiple providers (OCHA offices), and is not standardised at this time, HDX HAPI obtains the numbers from the [HPC Tools API](https://api.hpc.tools/docs/v1/). Datasets from the HPC API will soon be available on HDX.
+
+#### Details
+
+
+|||
+|:----|:----|
+|Time series|Past datasets are available in the HPC API, but are sparse prior to 2023.|
+|Update frequency|Annually|
+|Data provider|OCHA HPC|
+|Datasets||
+|Related endpoints and schemas|Sector, Disabled Marker, Gender, Population Group, Population Status|
+
+#### Usage Notes
+
+- The PIN should *not* be summed across sectors, as the same people can be counted across multiple sectors. For the number of people affected across all sectors, please use the PIN value where sector=intersectoral.
+- Methodology in Yemen leads to negative population values in some admin 2 level areas. Where negative values appear they have been omitted from the API.
+
+### Sub-category: Refugees & Persons of Concern
+
+This dataset, compiled by the UNHCR, provides annual age- and gender-disaggregated statistics on refugees and others of concern, categorised by their country of origin and country of asylum. The data are sourced primarily from governments hosting these populations, UNHCR's own registration data, and occasionally data published by non-governmental organisations.
+
+#### Details
+|||
+|:----|:----|
+|Time series|Yes, annual dating back to 2001|
+|Update frequency|Annually|
+|Data provider|UNHCR|
+|Dataset|[Data on forcibly displaced populations and stateless persons (Global)](https://data.humdata.org/dataset/unhcr-population-data-for-world)|
+|Related endpoints and schemas|Gender, Population Group|
+
+#### Usage Notes
+
+- The source data is not p-coded and only includes vague location descriptions. Since these are not consistently mappable to admin subdivisions, we aggregate to the national level. 
- The original data source contains several population groups such as IDPs and asylum seekers, but we only consider refugees (REF) and others of concern (OOC).
+- Note that an “all” value in the `gender` (`age_range`) column indicates a sum over all genders (age ranges).
+
+## Category: Coordination & Context
+
+### Sub-category: Operational Presence (3W - Who does What Where)
+
+The [Who does What Where (3W)](https://3w.unocha.org/) is a core humanitarian coordination dataset that contains the geographic and sectoral spread of humanitarian activities and partners. It is critical to know where humanitarian organisations are working and what they are doing in order to ensure collaboration and efficient resource allocation, avoid duplication of efforts, identify gaps, and plan for future humanitarian response.
+
+#### Details
+
+|||
+|:----|:----|
+|Time series|Not available for this subcategory. While older resources are available on HDX, HDX HAPI currently only supports the latest resource.|
+|Update frequency|Irregular, depending on the country. Common update frequencies include quarterly and annually. Please check individual resources for more information|
+|Data provider|OCHA country and regional offices|
+|Datasets|See the [data grids](https://data.humdata.org/dashboards/overview-of-data-grids?)|
+|Related endpoints and schemas|Org, Org Type, Sector|
+
+#### Usage Notes
+
+- This data comes from multiple providers (OCHA offices), in many different formats and levels of detail.
+- For consistency and interoperability, we aggregate to an [operational presence](https://humanitarian.atlassian.net/wiki/spaces/imtoolbox/pages/214499412/Who+does+What+Where+3W) level (3W:OP, per org, sector, and admin2), even if the original 3W data is more detailed (e.g. the source lists individual activities).
+
+### Sub-category: Funding
+
+OCHA's Financial Tracking Service (FTS) publishes data on humanitarian funding flows as reported by donors and recipient organisations. It presents all humanitarian funding to a country and funding that is reported or that can be specifically mapped against funding requirements stated in Humanitarian Response Plans.
+
+#### Details
+
+|||
+|:----|:----|
+|Time series|Data contains funding for several years, but the timepoints are not regular|
+|Update frequency|Annually|
+|Data provider|OCHA FTS|
+|Dataset|[OCHA FTS - Requirements and Funding Data series](https://data.humdata.org/dataset/?dataseries_name=OCHA+FTS+-+Requirements+and+Funding+Data)|
+|Related endpoints and schemas|None|
+
+#### Usage Notes
+
+- The present version of the API currently captures only funding associated with an appeal. Funding data without associated appeals will be added in a future version.
+
+### Sub-category: Conflict Events
+
+[ACLED](https://acleddata.com/) collects real-time data on the locations, dates, actors, fatalities, and types of all reported political violence and protest events around the world. 
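+For instance, a query of this subcategory might look like the following (the `coordination-context/conflict-event` path is inferred from the endpoint links earlier in this diff, and `event_type=political_violence` is an assumed filter value; confirm both in the sandbox):
+
+```plaintext
+https://stage.hapi-humdata-org.ahconu.org/api/v1/coordination-context/conflict-event?location_code=AFG&event_type=political_violence&output_format=json&offset=0&limit=1000&app_identifier={your app identifier}
+```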
#### Details
+
+|||
+|:----|:----|
+|Time series|Yes, monthly|
+|Update frequency|Weekly|
+|Data provider|ACLED|
+|Dataset|[ACLED Conflict Events Data series](https://data.humdata.org/dataset/?dataseries_name=ACLED+-+Conflict+Events)|
+|Related endpoints and schemas|Event Type|
+
+#### Usage Notes
+
+- The API uses ACLED’s public aggregated data.
+- The data for political violence events, civilian targeting events, and demonstrations are in separate resources on HDX, but are combined into a single endpoint in the API.
+
+### Sub-category: National Risk
+
+The [INFORM Risk Index](https://drmkc.jrc.ec.europa.eu/inform-index/INFORM-Risk) is a global, open-source risk assessment for humanitarian crises and disasters. It can support decisions about prevention, preparedness and response. For more information on the methodology, see [here](https://drmkc.jrc.ec.europa.eu/inform-index/INFORM-Risk/Methodology).
+
+#### Details
+
+|||
+|:----|:----|
+|Time series|Not available for this subcategory|
+|Update frequency|Annually|
+|Data provider|INFORM|
+|Dataset|[INFORM Risk Index](https://data.humdata.org/dataset/inform-risk-index-2021)|
+|Related endpoints and schemas|RiskClass|
+
+## Category: Food Security & Nutrition
+
+### Sub-category: Food Security
+
+The [IPC Acute Food Insecurity (IPC AFI) classification](https://www.ipcinfo.org/ipcinfo-website/ipc-overview-and-classification-system/ipc-acute-food-insecurity-classification/en/) provides strategically relevant information to decision makers that focuses on short-term objectives to prevent, mitigate or decrease severe food insecurity.
+
+#### Details
+
+|||
+|:----|:----|
+|Time series|Yes, with projections|
+|Update frequency|Annually|
+|Data provider|Food Security and Nutrition Working Group, West and Central Africa|
+|Dataset|[West & Central Africa Food Security Data - Cadre Harmonise (CH) and Integrated Food Security Phase Classification (IPC) data](https://data.humdata.org/dataset/cadre-harmonise)|
+|Related endpoints and schemas|IPCPhase, IPCType|
+
+#### Usage Notes
+
+- The Beta release only contains data from the [Cadre Harmonisé](https://www.cadreharmonise.org/en_GB), as it is p-coded. In a future release we will p-code and expand coverage to other IPC datasets.
+- The reference period refers to the time frame that the projection covers, not when the projection was made.
+- The IPC fraction is computed in the HDX HAPI pipeline, by dividing the population by the total population (ipc_phase=all).
+- The total population (ipc_phase=all) is not necessarily equal to the sum of the populations in phases 1-5.
+
+### Sub-category: Food Prices
+
+The World Food Programme Price Database covers foods such as maize, rice, beans, fish, and sugar for 98 countries and some 3000 markets. It is updated weekly but primarily contains data with a monthly update frequency.
+
+#### Details
+
+|||
+|:----|:----|
+|Time series|Yes, primarily monthly|
+|Update frequency|Weekly|
+|Data provider|The World Food Programme|
+|Dataset|[WFP Food Prices data series](https://data.humdata.org/dataset/?dataseries_name=WFP+-+Food+Prices)|
+|Related endpoints and schemas|Commodity, Currency, Market|
+
+#### Usage Notes
+
+- The source data is not p-coded, however we have used the admin 1 and 2 names to p-code most markets. See the Markets section for more details. 
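+As a hedged example, a food price query filtered to a price band might look like this (the `food/food-price` path is inferred from the endpoint links earlier in this diff, and the `price_min`/`price_max` parameters mirror the DAO arguments added below; confirm the exact names in the sandbox):
+
+```plaintext
+https://stage.hapi-humdata-org.ahconu.org/api/v1/food/food-price?location_code=MLI&price_min=100&price_max=1000&output_format=json&offset=0&limit=1000&app_identifier={your app identifier}
+```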
## Category: Population & Socio-economy
+
+### Sub-category: Baseline Population
+
+This data comprises population statistics sourced from the [common operational datasets](https://cod.unocha.org/) (CODs), typically disaggregated by age and/or gender, and reaching administrative levels 1 or 2. The primary sources include the UNFPA and OCHA country offices; the data is utilised for coordination purposes, although disparities may arise when compared to alternative sources.
+
+#### Details
+
+|||
+|:----|:----|
+|Time series|No|
+|Update frequency|Annually|
+|Data provider|UNFPA, OCHA country offices|
+|Dataset|[COD - subnational population statistics data series](https://data.humdata.org/dataset/?dataseries_name=COD+-+Subnational+Population+Statistics)|
+|Related endpoints and schemas|Gender|
+
+### Sub-category: Poverty Rate
+
+The global [Oxford Multidimensional Poverty Index](https://ophi.org.uk/global-mpi) (MPI) measures multidimensional poverty in over 100 developing countries, using internationally comparable datasets.
+The MPI assesses poverty through three main dimensions: health, education, and living standards, each of which is represented by specific indicators. Please see the [OPHI methodological note](https://ophi.org.uk/publications/MN-54) for more details.
+
+#### Details
+
+|||
+|:----|:----|
+|Time series|Annual, with some combined years|
+|Update frequency|Annually|
+|Data provider|Oxford Poverty and Human Development Initiative (OPHI)|
+|Dataset|[Oxford Poverty and Human Development Initiative - Global Multidimensional Poverty Index data series](https://data.humdata.org/dataset/?dataseries_name=Oxford+Poverty+and+Human+Development+Initiative+-+Global+Multidimensional+Poverty+Index)|
+|Related endpoints and schemas|None|
+
+
+#### Usage Notes
+
+The source data is very detailed. We’ve chosen the following subset of indicators, detailed below:
+
+|Indicator|Format|Description|
+|:----|:----|:----|
+|mpi|fraction|The multidimensional poverty index. Derived as a product of the headcount ratio and intensity of deprivation.|
+|headcount_ratio|%|The percentage of people deprived in 33% or more of the indicators|
+|intensity_of_deprivation|%|The average proportion of indicators in which people are deprived|
+|vulnerable_to_poverty|%|The percentage of people deprived in 20-33% of the indicators|
+|in_severe_poverty|%|The percentage of people deprived in 50% or more of the indicators|
+
+## Metadata
+
+### Sectors
+
+- The list of sectors is available through the sector endpoint
+- This table is populated using the Global Coordination Groups, with the following additional entries:
+    - cash
+    - humanitarian assistance
+    - multi-sector
+    - intersectoral
+- The sector name strings in the 3W data are normalised and then aligned to the sector table, using the “sector_map” section of this configuration file if needed: https://github.com/OCHA-DAP/hapi-pipelines/blob/main/src/hapi/pipelines/configs/core.yaml. In the absence of a direct match, phonetic matching is used for strings > 5 characters.
+
+### Org
+
+- The organisation table is populated from the 3W data
+- Organisation name and acronym strings are normalised. If an acronym isn’t available, the first 32 characters of the name are used. 
- This [organisation mapping](https://docs.google.com/spreadsheets/d/e/2PACX-1vSfBWvSu3fKA743VvHtgf-pIGkYH7zhy-NP7DZgEV9_a6YU7vtCeWhbLM56aUL1iIfrfv5UBvvjVt7B/pub?gid=1040329566&single=true&output=csv) is used for common alternative names
+- Organisations without a sector are not included
+- Organisations can have an associated organisation type. If available, the organisation type code is taken directly from the 3W data, otherwise the name string is normalised and matched to the org type names. In the absence of a direct match, phonetic matching is used for strings > 5 characters. If no match is found, the organisation is skipped.
+
+### OrgType
+
+- The table is populated using [OCHA Digital Services organization types list](https://data.humdata.org/dataset/organization-types-beta), with the addition of:
+    - Civil Society
+    - Observer
+    - Development Programme
+    - Local NGO
+- Organisation types all have an associated name and code.
+
+### IPC Code
+
+The IPC classification includes 5 different phases of increasing severity, described in detail on page 53 of [the IPC technical manual version 3.1](https://www.ipcinfo.org/fileadmin/user_upload/ipcinfo/manual/IPC_Technical_Manual_3_Final.pdf), and summarised below:
+
+|Phase|Name|Description|
+|:----|:----|:----|
+|1|None/Minimal|Households are able to meet essential food and non-food needs without engaging in atypical and unsustainable strategies to access food and income.|
+|2|Stressed|Households have minimally adequate food consumption but are unable to afford some essential non-food expenditures without engaging in stress-coping strategies.|
+|3|Crisis|Households either have food consumption gaps that are reflected by high or above-usual acute malnutrition, or are marginally able to meet minimum food needs but only by depleting essential livelihood assets or through crisis-coping strategies.|
+|4|Emergency|Households either have large food consumption gaps which are reflected in very high acute malnutrition and excess mortality, or are able to mitigate large food consumption gaps but only by employing emergency livelihood strategies and asset liquidation.|
+|5|Catastrophe/Famine|Households have an extreme lack of food and/or other basic needs even after full employment of coping strategies. Starvation, death, destitution and extremely critical acute malnutrition levels are evident. (For Famine Classification, an area needs to have extreme critical levels of acute malnutrition and mortality.)|
+|3+|In Need of Action|Sum of population in phases 3, 4, and 5. The population in Phase 3+ does not necessarily reflect the full population in need of urgent action. This is because some households may be in Phase 2 or even 1 but only because of receipt of assistance, and thus, they may be in need of continued action.|
+|all|Total population|Total population|
+
+The above table also includes Phase 3+, to highlight the population in need of action, and total population, used to compute fractions. 
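+To make the fraction computation concrete, a tiny illustrative sketch (the population counts are invented, not real data):
+
+```python
+# Illustrative only: invented population counts per IPC phase.
+population = {"1": 500_000, "2": 300_000, "3": 150_000, "4": 40_000, "5": 10_000, "3+": 200_000, "all": 1_000_000}
+
+# The IPC fraction divides each phase's population by the total (ipc_phase=all).
+fractions = {phase: count / population["all"] for phase, count in population.items() if phase != "all"}
+
+print(fractions["3+"])  # 0.2
+# Note: the total is not necessarily the sum of phases 1-5.
+```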
+ +### IPC Type + +The IPC provides different projections to aid in planning and response efforts, outlined in the following table: + +|IPC type|Definition| +|:----|:----| +|current|Food insecurity that is occurring in the current analysis period.| +|first projection|Projected food insecurity occurring in the period immediately following the current analysis period.| +|second projection|Projected food insecurity occurring in the period immediately following the first projection period.| \ No newline at end of file diff --git a/hdx_hapi/config/__init__.py b/hdx_hapi/config/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/hdx_hapi/config/config.py b/hdx_hapi/config/config.py index efce11f4..258cb4fa 100644 --- a/hdx_hapi/config/config.py +++ b/hdx_hapi/config/config.py @@ -1,7 +1,9 @@ -from dataclasses import dataclass import logging import os +from dataclasses import dataclass +from mixpanel import Mixpanel + from hdx_hapi.config.helper import create_pg_uri_from_env_without_protocol logger = logging.getLogger(__name__) @@ -17,12 +19,19 @@ class Config: HDX_DATASET_URL: str HDX_DATASET_API_URL: str HDX_ORGANIZATION_URL: str + HDX_ORGANIZATION_API_URL: str HDX_RESOURCE_URL: str HDX_RESOURCE_API_URL: str HAPI_READTHEDOCS_OVERVIEW_URL: str + HAPI_SERVER_URL: str + + HAPI_IDENTIFIER_FILTERING: bool + + MIXPANEL: Mixpanel + CONFIG = None @@ -34,6 +43,7 @@ def get_config() -> Config: sql_alchemy_asyncypg_db_uri = f'postgresql+asyncpg://{db_uri_without_protocol}' sql_alchemy_psycopg2_db_uri = f'postgresql+psycopg2://{db_uri_without_protocol}' + mixpanel_token = os.getenv('HDX_MIXPANEL_TOKEN', '') CONFIG = Config( SQL_ALCHEMY_ASYNCPG_DB_URI=sql_alchemy_asyncypg_db_uri, SQL_ALCHEMY_PSYCOPG2_DB_URI=sql_alchemy_psycopg2_db_uri, @@ -45,9 +55,15 @@ def get_config() -> Config: 'HDX_RESOURCE_API_URL', '{domain}/api/action/resource_show?id={resource_id}' ), HDX_ORGANIZATION_URL=os.getenv('HDX_ORGANIZATION_URL', '{domain}/organization/{org_id}'), + HDX_ORGANIZATION_API_URL=os.getenv( + 'HDX_ORGANIZATION_API_URL', '{domain}/api/action/organization_show?id={org_id}' + ), HAPI_READTHEDOCS_OVERVIEW_URL=os.getenv( 'HAPI_READTHEDOCS_OVERVIEW_URL', 'https://hdx-hapi.readthedocs.io/en/latest/' ), + HAPI_SERVER_URL=os.getenv('HAPI_SERVER_URL', None), + HAPI_IDENTIFIER_FILTERING=os.getenv('HAPI_IDENTIFIER_FILTERING', 'True').lower() == 'true', + MIXPANEL=Mixpanel(mixpanel_token) if mixpanel_token else None, ) return CONFIG diff --git a/hdx_hapi/config/doc_snippets.py b/hdx_hapi/config/doc_snippets.py index 269f0d2e..50acb4bd 100644 --- a/hdx_hapi/config/doc_snippets.py +++ b/hdx_hapi/config/doc_snippets.py @@ -2,39 +2,54 @@ CONFIG = get_config() +DOC_ADMIN1_REF = 'Filter the response by the 1st subnational administrative reference number. The admin1 reference is intended as a stable identifier which will not change if, for example, admin1 name changes' DOC_ADMIN1_CODE = 'Filter the response by the 1st subnational administrative divisions. The admin1 codes refer to the p-codes in the Common Operational Datasets.' DOC_ADMIN1_NAME = 'Filter the response by the 1st subnational administrative divisions. The admin1 names refer to the Common Operational Datasets.' +DOC_ADMIN2_REF = 'Filter the response by the 2nd subnational administrative reference number. The admin2 reference is intended as a stable identifier which will not change if, for example, admin2 name changes' DOC_ADMIN2_CODE = 'Filter the response by the 2nd subnational administrative divisions. 
The admin2 codes refer to the p-codes in the Common Operational Datasets.' -DOC_ADMIN2_NAME = 'Filter the response by the 1st subnational administrative divisions. The admin2 names refer to the Common Operational Datasets.' +DOC_ADMIN2_NAME = 'Filter the response by the 2nd subnational administrative divisions. The admin2 names refer to the Common Operational Datasets.' DOC_AGE_RANGE_SUMMARY = 'Get the list of age ranges used for disaggregating population data' -DOC_AGE_RANGE_CODE = 'Filter the response by the age range. These are expressed as [start year]-[end year]. The end year is assumed to be inclusive, though that is not always explicit in the source data.' +DOC_AGE_RANGE = 'Filter the response by the age range. These are expressed as [start age]-[end age]. The end age is assumed to be inclusive, though that is not always explicit in the source data.' DOC_GENDER_SUMMARY = 'Get the list of gender codes used for disaggregating population data' DOC_GENDER_CODE = 'Filter the response by the gender code.' +DOC_GENDER = 'Filter the response by the gender. f (female), m (male), x (non-binary), u (unspecified), o (other) and all (a sum over all genders)' DOC_GENDER_DESCRIPTION = 'Filter the response by the gender description.' -DOC_HDX_DATASET_ID = 'Filter the response by the dataset ID, which is a unique and fixed identifier of a Dataset on HDX. A URL in the pattern of `https://data.humdata.org/dataset/[dataset id]` will load the dataset page on HDX.' -DOC_HDX_DATASET_NAME = 'Filter the response by the URL-safe name of the dataset as displayed on HDX. This name is unique but can change. A URL in the pattern of `https://data.humdata.org/dataset/[dataset name]` will load the dataset page on HDX.' +DOC_POPULATION_GROUP = 'Filter the response by the population group. ' +DOC_POPULATION_STATUS = 'Filter the response by status. POP (population), AFF (affected), INN (in need), TGT (targeted), REA (reached) or all (all)' +DOC_HDX_DATASET_ID = 'Filter the response by the dataset ID (dataset_hdx_id), which is a unique and fixed identifier of a Dataset on HDX. A URL in the pattern of `https://data.humdata.org/dataset/[dataset_hdx_id]` will load the dataset page on HDX.' +DOC_HDX_DATASET_NAME = 'Filter the response by the URL-safe name (dataset_hdx_stub) of the dataset as displayed on HDX. This name is unique but can change. A URL in the pattern of `https://data.humdata.org/dataset/[dataset_hdx_stub]` will load the dataset page on HDX.' DOC_HDX_DATASET_TITLE = 'Filter the response by the title of the dataset as it appears in the HDX interface. This name is not unique and can change.' -DOC_HDX_PROVIDER_STUB = "Filter the response by the code of the provider of the dataset on HDX. A URL in the pattern of `https://data.humdata.org/organization/[org stub]` will load the provider's page on HDX." -DOC_HDX_PROVIDER_NAME = 'Filter the response by the display name of the provider of the dataset on HDX.' -DOC_HDX_RESOURCE_ID = 'Filter the response by the resource ID, which is a unique and fixed identifier of a Dataset on HDX. A URL in the pattern of `https://data.humdata.org/dataset/[dataset id]/resource/[resource id]` will load the dataset page on HDX.' -DOC_HDX_RESOURCE_FORMAT = 'Filter the response by the format of the resource on HDX. These are typically file formats, but can also include APIs and web apps.' +DOC_HDX_PROVIDER_STUB = "Filter the response by the code of the provider (organization) of the dataset on HDX. 
A URL in the pattern of `https://data.humdata.org/organization/[hdx_provider_stub]` will load the provider's page on HDX." +DOC_HDX_PROVIDER_NAME = 'Filter the response by the display name of the provider (organization) of the dataset on HDX.' +DOC_HDX_RESOURCE_ID = 'Filter the response by the resource ID (hdx_id), which is a unique and fixed identifier of a resource on HDX. A URL in the pattern of `https://data.humdata.org/dataset/[dataset_hdx_id]/resource/[resource_hdx_id]` will load the resource page on HDX.' +DOC_HDX_RESOURCE_FORMAT = 'Filter the response by the format of the resource on HDX. These are typically file formats (i.e. CSV, XLSX), but can also include APIs and web apps.' DOC_HDX_RESOURCE_HXL = ( 'Filter the response by whether or not the resource contains HXL tags.' ) +DOC_HDX_DATASET_IN_RESOURCE_ID = 'Filter the response by the dataset ID (dataset_hdx_id), which is a unique and fixed identifier of a dataset on HDX. A URL in the pattern of `https://data.humdata.org/dataset/[dataset_hdx_id]` will load the dataset page on HDX. ' +DOC_HDX_DATASET_IN_RESOURCE_NAME = 'Filter the response by the URL-safe name (dataset_hdx_stub) of the dataset as displayed on HDX. This name is unique but can change. A URL in the pattern of `https://data.humdata.org/dataset/[dataset_hdx_stub]` will load the dataset page on HDX.' +DOC_HDX_PROVIDER_IN_RESOURCE_STUB = "Filter the response by the code of the provider (organization) of the dataset on HDX. A URL in the pattern of `https://data.humdata.org/organization/[dataset_hdx_provider_stub]` will load the provider's page on HDX." +DOC_LOCATION_REF = 'Filter the response by a location (typically a country) reference number. The location reference is intended as a stable identifier which will not change if, for example location name changes' DOC_LOCATION_CODE = 'Filter the response by a location (typically a country). The location codes use the ISO-3 (ISO 3166 alpha-3) codes.' DOC_LOCATION_NAME = 'Filter the response by a location (typically a country). The location names are based on the "short name" from the UN M49 Standard.' -DOC_ORG_ACRONYM = 'Filter the response by the standard acronym used to represent the organization. When data is brought into the HAPI database, an attempt is made to standardize the acronyms.' -DOC_ORG_NAME = 'Filter the response by the standard name used to represent the organization. When data is brought into the HAPI database, an attempt is made to standardize the acronyms.' +DOC_ORG_ACRONYM = 'Filter the response by the standard acronym used to represent the organization. When data is brought into the HDX HAPI database, an attempt is made to standardize the acronyms.' +DOC_ORG_NAME = 'Filter the response by the standard name used to represent the organization. When data is brought into the HDX HAPI database, an attempt is made to standardize the acronyms.' DOC_ORG_TYPE_CODE = 'Filter the response by the organization type code.' DOC_ORG_TYPE_DESCRIPTION = 'Filter the response by the organization type description.' -DOC_SCOPE_DISCLAIMER = f'Not all data are available for all locations. Learn more about the scope of data coverage in HAPI in the Overview and Getting Started documentation.' +DOC_SCOPE_DISCLAIMER = f'Not all data are available for all locations. Learn more about the scope of data coverage in HDX HAPI in the Overview and Getting Started documentation.' DOC_SECTOR_CODE = 'Filter the response by the sector code.' DOC_SECTOR_NAME = 'Filter the response by the sector name.' 
DOC_UPDATE_DATE_MIN = 'Min date of update date, e.g. 2020-01-01 or 2020-01-01T00:00:00' DOC_UPDATE_DATE_MAX = 'Max date of update date, e.g. 2020-01-01 or 2020-01-01T00:00:00' +DOC_HAPI_UPDATED_DATE_MIN = 'Min date of HDX HAPI updated date, e.g. 2020-01-01 or 2020-01-01T00:00:00' +DOC_HAPI_UPDATED_DATE_MAX = 'Max date of HDX HAPI updated date, e.g. 2020-01-01 or 2020-01-01T00:00:00' +DOC_HAPI_REPLACED_DATE_MIN = 'Min date of HDX HAPI replaced date, e.g. 2020-01-01 or 2020-01-01T00:00:00' +DOC_HAPI_REPLACED_DATE_MAX = 'Max date of HDX HAPI replaced date, e.g. 2020-01-01 or 2020-01-01T00:00:00' -DOC_SEE_ADMIN1 = 'See the admin1 endpoint for details.' -DOC_SEE_ADMIN2 = 'See the admin2 endpoint for details.' -DOC_SEE_DATASET = 'See the dataset endpoint for details.' -DOC_SEE_LOC = 'See the location endpoint for details.' -DOC_SEE_ORG_TYPE = 'See the org type endpoint for details.' +DOC_SEE_ADMIN1 = 'See the admin1 endpoint for details.' +DOC_SEE_ADMIN2 = 'See the admin2 endpoint for details.' +DOC_SEE_DATASET = 'See the dataset endpoint for details.' +DOC_SEE_LOC = 'See the location endpoint for details.' +DOC_SEE_ORG_TYPE = 'See the org type endpoint for details.' + +DOC_CURRENCY_CODE = 'Filter the response by the currency code.' diff --git a/hdx_hapi/db/dao/admin1_view_dao.py b/hdx_hapi/db/dao/admin1_view_dao.py index 9f660eb8..ea36847b 100644 --- a/hdx_hapi/db/dao/admin1_view_dao.py +++ b/hdx_hapi/db/dao/admin1_view_dao.py @@ -1,34 +1,34 @@ import logging -from typing import Dict - from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy import select -from hdx_hapi.db.models.views.db_admin1_view import Admin1View -from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter +from hdx_hapi.db.models.views.all_views import Admin1View +from hdx_hapi.db.dao.util.util import apply_pagination, apply_reference_period_filter, case_insensitive_filter +from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters logger = logging.getLogger(__name__) + async def admin1_view_list( - pagination_parameters: Dict, + pagination_parameters: PaginationParams, + ref_period_parameters: ReferencePeriodParameters, db: AsyncSession, code: str = None, name: str = None, location_code: str = None, location_name: str = None, ): - logger.info( - f'admin1_view_list called with params: code={code}, name={name}, ' \ + f'admin1_view_list called with params: code={code}, name={name}, ' f'location_code={location_code}, location_name={location_name}' ) query = select(Admin1View) if True: # TODO: implement debug=True to show unspecified values - query = query.where(Admin1View.is_unspecified==False) + query = query.where(Admin1View.is_unspecified == False) if code: query = case_insensitive_filter(query, Admin1View.code, code) if name: @@ -38,6 +38,8 @@ async def admin1_view_list( if location_name: query = query.where(Admin1View.location_name.icontains(location_name)) + query = apply_reference_period_filter(query, ref_period_parameters, Admin1View) + query = apply_pagination(query, pagination_parameters) logger.debug(f'Executing SQL query: {query}') @@ -47,4 +49,4 @@ async def admin1_view_list( logger.info(f'Retrieved {len(admin1_data)} rows from the database') - return admin1_data \ No newline at end of file + return admin1_data diff --git a/hdx_hapi/db/dao/admin2_view_dao.py b/hdx_hapi/db/dao/admin2_view_dao.py index efc794f8..3c6e58bb 100644 --- a/hdx_hapi/db/dao/admin2_view_dao.py +++ b/hdx_hapi/db/dao/admin2_view_dao.py @@ -1,18 +1,19 @@ import logging -from typing import 
Dict - from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy import select -from hdx_hapi.db.models.views.db_admin2_view import Admin2View -from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter +from hdx_hapi.db.models.views.all_views import Admin2View +from hdx_hapi.db.dao.util.util import apply_pagination, apply_reference_period_filter, case_insensitive_filter +from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters logger = logging.getLogger(__name__) + async def admin2_view_list( - pagination_parameters: Dict, + pagination_parameters: PaginationParams, + ref_period_parameters: ReferencePeriodParameters, db: AsyncSession, code: str = None, name: str = None, @@ -21,16 +22,15 @@ async def admin2_view_list( location_code: str = None, location_name: str = None, ): - logger.info( - f'admin2_view_list called with params: code={code}, name={name}, admin1_code={admin1_code}, ' \ + f'admin2_view_list called with params: code={code}, name={name}, admin1_code={admin1_code}, ' f'admin1_name={admin1_name}, location_code={location_code}, location_name={location_name}' ) query = select(Admin2View) if True: # TODO: implement debug=True to show unspecified values - query = query.where(Admin2View.is_unspecified==False) + query = query.where(Admin2View.is_unspecified == False) if code: query = case_insensitive_filter(query, Admin2View.code, code) if name: @@ -44,6 +44,8 @@ async def admin2_view_list( if location_name: query = query.where(Admin2View.location_name.icontains(location_name)) + query = apply_reference_period_filter(query, ref_period_parameters, Admin2View) + query = apply_pagination(query, pagination_parameters) logger.debug(f'Executing SQL query: {query}') @@ -53,4 +55,4 @@ async def admin2_view_list( logger.info(f'Retrieved {len(admin2_data)} rows from the database') - return admin2_data \ No newline at end of file + return admin2_data diff --git a/hdx_hapi/db/dao/age_range_view_dao.py b/hdx_hapi/db/dao/age_range_view_dao.py deleted file mode 100644 index d5b64467..00000000 --- a/hdx_hapi/db/dao/age_range_view_dao.py +++ /dev/null @@ -1,33 +0,0 @@ -import logging - -from typing import Dict - -from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy import select - -from hdx_hapi.db.models.views.db_age_range_view import AgeRangeView -from hdx_hapi.db.dao.util.util import apply_pagination - -logger = logging.getLogger(__name__) - -async def age_ranges_view_list( - pagination_parameters: Dict, - db: AsyncSession, - code: str = None, -): - - logger.info(f'age_ranges_view_list called with params: code={code}') - - query = select(AgeRangeView) - if code: - query = query.where(AgeRangeView.code == code) - query = apply_pagination(query, pagination_parameters) - - logger.debug(f'Executing SQL query: {query}') - - result = await db.execute(query) - age_ranges = result.scalars().all() - - logger.info(f'Retrieved {len(age_ranges)} rows from the database') - - return age_ranges \ No newline at end of file diff --git a/hdx_hapi/db/dao/conflict_event_view_dao.py b/hdx_hapi/db/dao/conflict_event_view_dao.py new file mode 100644 index 00000000..a6252deb --- /dev/null +++ b/hdx_hapi/db/dao/conflict_event_view_dao.py @@ -0,0 +1,68 @@ +import logging +from typing import Optional, Sequence + +from hapi_schema.utils.enums import EventType +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select + +from hdx_hapi.db.models.views.all_views import ConflictEventView +from hdx_hapi.db.dao.util.util import ( + 
apply_location_admin_filter, + apply_pagination, + apply_reference_period_filter, +) +from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters + + +logger = logging.getLogger(__name__) + + +async def conflict_event_view_list( + pagination_parameters: PaginationParams, + ref_period_parameters: ReferencePeriodParameters, + db: AsyncSession, + event_type: Optional[EventType] = None, + location_ref: Optional[int] = None, + location_code: Optional[str] = None, + location_name: Optional[str] = None, + admin1_ref: Optional[int] = None, + admin1_code: Optional[str] = None, + admin1_name: Optional[str] = None, + admin1_is_unspecified: Optional[bool] = None, + admin2_ref: Optional[int] = None, + admin2_code: Optional[str] = None, + admin2_name: Optional[str] = None, + admin2_is_unspecified: Optional[bool] = None, +) -> Sequence[ConflictEventView]: + query = select(ConflictEventView) + if event_type: + query = query.where(ConflictEventView.event_type == event_type) + + query = apply_location_admin_filter( + query, + ConflictEventView, + location_ref, + location_code, + location_name, + admin1_ref, + admin1_code, + admin1_name, + admin1_is_unspecified, + admin2_ref, + admin2_code, + admin2_name, + admin2_is_unspecified, + ) + + query = apply_reference_period_filter(query, ref_period_parameters, ConflictEventView) + + query = apply_pagination(query, pagination_parameters) + + logger.debug(f'Executing SQL query: {query}') + + result = await db.execute(query) + conflict_events = result.scalars().all() + + logger.info(f'Retrieved {len(conflict_events)} rows from the database') + + return conflict_events diff --git a/hdx_hapi/db/dao/currency_view_dao.py b/hdx_hapi/db/dao/currency_view_dao.py new file mode 100644 index 00000000..c8f4acf1 --- /dev/null +++ b/hdx_hapi/db/dao/currency_view_dao.py @@ -0,0 +1,33 @@ +import logging +from typing import Optional +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select + +from hdx_hapi.db.models.views.all_views import CurrencyView +from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter +from hdx_hapi.endpoints.util.util import PaginationParams + +logger = logging.getLogger(__name__) + + +async def currencies_view_list( + pagination_parameters: PaginationParams, + db: AsyncSession, + code: Optional[str] = None, +): + logger.info(f'currency_view_list called with params: code={code}') + + query = select(CurrencyView) + if code: + query = case_insensitive_filter(query, CurrencyView.code, code) + + query = apply_pagination(query, pagination_parameters) + + logger.debug(f'Executing SQL query: {query}') + + result = await db.execute(query) + currencies = result.scalars().all() + + logger.info(f'Retrieved {len(currencies)} rows from the database') + + return currencies diff --git a/hdx_hapi/db/dao/dataset_view_dao.py b/hdx_hapi/db/dao/dataset_view_dao.py index 2c1f1ff4..e6752fbf 100644 --- a/hdx_hapi/db/dao/dataset_view_dao.py +++ b/hdx_hapi/db/dao/dataset_view_dao.py @@ -1,37 +1,38 @@ import logging - -from typing import Dict +from typing import Optional from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy import select -from hdx_hapi.db.models.views.db_dataset_view import DatasetView +from hdx_hapi.db.models.views.all_views import DatasetView from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter +from hdx_hapi.endpoints.util.util import PaginationParams logger = logging.getLogger(__name__) + async def datasets_view_list( - pagination_parameters: Dict, + 
pagination_parameters: PaginationParams, db: AsyncSession, - hdx_id: str = None, - hdx_stub: str = None, - title: str = None, - hdx_provider_stub: str = None, - hdx_provider_name: str = None, + dataset_hdx_id: Optional[str] = None, + dataset_hdx_stub: Optional[str] = None, + dataset_hdx_title: Optional[str] = None, + hdx_provider_stub: Optional[str] = None, + hdx_provider_name: Optional[str] = None, ): - logger.info( - f'datasets_view_list called with params: hdx_id={hdx_id}, hdx_stub={hdx_stub}, title={title}, ' \ - f'hdx_provider_stub={hdx_provider_stub}, hdx_provider_name={hdx_provider_name}' + f'datasets_view_list called with params: dataset_hdx_id={dataset_hdx_id}, dataset_hdx_stub={dataset_hdx_stub}, ' + f'dataset_hdx_title={dataset_hdx_title}, hdx_provider_stub={hdx_provider_stub}, ' + f'hdx_provider_name={hdx_provider_name}' ) query = select(DatasetView) - if hdx_id: - query = query.where(DatasetView.hdx_id == hdx_id) - if hdx_stub: - query = query.where(DatasetView.hdx_stub == hdx_stub) - if title: - query = query.where(DatasetView.title.icontains(title)) + if dataset_hdx_id: + query = query.where(DatasetView.dataset_hdx_id == dataset_hdx_id) + if dataset_hdx_stub: + query = query.where(DatasetView.dataset_hdx_stub == dataset_hdx_stub) + if dataset_hdx_title: + query = query.where(DatasetView.dataset_hdx_title.icontains(dataset_hdx_title)) if hdx_provider_stub: query = case_insensitive_filter(query, DatasetView.hdx_provider_stub, hdx_provider_stub) if hdx_provider_name: @@ -39,11 +40,11 @@ async def datasets_view_list( query = apply_pagination(query, pagination_parameters) - logger.debug(f'Executing SQL query: {query}') + logger.info(f'Executing SQL query: {query}') result = await db.execute(query) datasets = result.scalars().all() logger.info(f'Retrieved {len(datasets)} rows from the database') - return datasets \ No newline at end of file + return datasets diff --git a/hdx_hapi/db/dao/food_price_dao.py b/hdx_hapi/db/dao/food_price_dao.py new file mode 100644 index 00000000..816d9930 --- /dev/null +++ b/hdx_hapi/db/dao/food_price_dao.py @@ -0,0 +1,88 @@ +from decimal import Decimal +import logging +from typing import Optional, Sequence + +from hapi_schema.utils.enums import CommodityCategory, PriceFlag, PriceType +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select + +from hdx_hapi.db.dao.util.util import ( + apply_location_admin_filter, + apply_pagination, + case_insensitive_filter, +) +from hdx_hapi.db.models.views.all_views import FoodPriceView +from hdx_hapi.endpoints.util.util import PaginationParams + + +logger = logging.getLogger(__name__) + + +async def food_price_view_list( + pagination_parameters: PaginationParams, + db: AsyncSession, + market_code: Optional[str] = None, + market_name: Optional[str] = None, + commodity_code: Optional[str] = None, + commodity_category: Optional[CommodityCategory] = None, + commodity_name: Optional[str] = None, + price_flag: Optional[PriceFlag] = None, + price_type: Optional[PriceType] = None, + price_min: Optional[Decimal] = None, + price_max: Optional[Decimal] = None, + # lat: Optional[float] = None, + # lon: Optional[float] = None, + location_code: Optional[str] = None, + location_name: Optional[str] = None, + admin1_ref: Optional[int] = None, + admin1_code: Optional[str] = None, + admin1_name: Optional[str] = None, + admin1_is_unspecified: Optional[bool] = None, + location_ref: Optional[int] = None, + admin2_ref: Optional[int] = None, + admin2_code: Optional[str] = None, + admin2_name: Optional[str] = None, 
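
The `price_min`/`price_max` pair above follows the same half-open `[min, max)` convention used by every `*_min`/`*_max` filter in this patch: `>=` on the lower bound, `<` on the upper. A self-contained sketch of the predicate pattern (table and column names below are illustrative only, not taken from the patch):

```python
# Sketch of the half-open [min, max) range filtering used by the DAOs.
from sqlalchemy import Column, MetaData, Numeric, Table, select

metadata = MetaData()
prices = Table('food_price', metadata, Column('price', Numeric))

query = select(prices)
price_min, price_max = 10, 20
if price_min:  # NB: a bound of 0 is falsy and would skip the filter
    query = query.where(prices.c.price >= price_min)
if price_max:
    query = query.where(prices.c.price < price_max)

# Renders roughly as:
#   SELECT food_price.price FROM food_price
#   WHERE food_price.price >= :price_1 AND food_price.price < :price_2
print(query)
```

Note that the truthiness guards (`if price_min:`) silently skip the filter when a caller passes `0`, which only matters for filters where zero is a meaningful bound.
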
+ admin2_is_unspecified: Optional[bool] = None, +) -> Sequence[FoodPriceView]: + query = select(FoodPriceView) + if market_code: + query = case_insensitive_filter(query, FoodPriceView.market_code, market_code) + if market_name: + query = query.where(FoodPriceView.market_name.icontains(market_name)) + if commodity_code: + query = case_insensitive_filter(query, FoodPriceView.commodity_code, commodity_code) + if commodity_category: + query = query.where(FoodPriceView.commodity_category == commodity_category) + if commodity_name: + query = query.where(FoodPriceView.commodity_name.icontains(commodity_name)) + if price_flag: + query = query.where(FoodPriceView.price_flag == price_flag) + if price_type: + query = query.where(FoodPriceView.price_type == price_type) + if price_min: + query = query.where(FoodPriceView.price >= price_min) + if price_max: + query = query.where(FoodPriceView.price < price_max) + + query = apply_location_admin_filter( + query, + FoodPriceView, + location_ref, + location_code, + location_name, + admin1_ref, + admin1_code, + admin1_name, + admin1_is_unspecified, + admin2_ref, + admin2_code, + admin2_name, + admin2_is_unspecified, + ) + + query = apply_pagination(query, pagination_parameters) + + result = await db.execute(query) + food_prices = result.scalars().all() + + return food_prices diff --git a/hdx_hapi/db/dao/food_security_view_dao.py b/hdx_hapi/db/dao/food_security_view_dao.py index 0fd8c9cc..fd87c140 100644 --- a/hdx_hapi/db/dao/food_security_view_dao.py +++ b/hdx_hapi/db/dao/food_security_view_dao.py @@ -1,62 +1,63 @@ -import datetime -from typing import Dict - +from typing import Optional from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy import select -from hdx_hapi.db.models.views.db_food_security_view import FoodSecurityView -from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter +from hapi_schema.utils.enums import IPCType, IPCPhase + +from hdx_hapi.db.models.views.all_views import FoodSecurityView +from hdx_hapi.db.dao.util.util import ( + ReferencePeriodParameters, + PaginationParams, + apply_location_admin_filter, + apply_reference_period_filter, + apply_pagination, +) async def food_security_view_list( - pagination_parameters: Dict, + pagination_parameters: PaginationParams, + ref_period_parameters: ReferencePeriodParameters, db: AsyncSession, - ipc_phase_code: str = None, - ipc_type_code: str = None, - dataset_hdx_provider_stub: str = None, - resource_update_date_min: datetime = None, - resource_update_date_max: datetime = None, - location_code: str = None, - location_name: str = None, - admin1_name: str = None, - admin1_code: str = None, - admin1_is_unspecified: bool = None, - admin2_code: str = None, - admin2_name: str = None, - admin2_is_unspecified: bool = None, + ipc_phase: Optional[IPCPhase] = None, + ipc_type: Optional[IPCType] = None, + location_code: Optional[str] = None, + location_name: Optional[str] = None, + admin1_name: Optional[str] = None, + admin1_code: Optional[str] = None, + admin1_is_unspecified: Optional[bool] = None, + location_ref: Optional[int] = None, + admin2_code: Optional[str] = None, + admin2_name: Optional[str] = None, + admin2_is_unspecified: Optional[bool] = None, + admin1_ref: Optional[int] = None, + admin2_ref: Optional[int] = None, ): - query = select(FoodSecurityView) - if ipc_phase_code: - query = query.where(FoodSecurityView.ipc_phase_code == ipc_phase_code) - if ipc_type_code: - query = case_insensitive_filter(query, FoodSecurityView.ipc_type_code, ipc_type_code) - if 
dataset_hdx_provider_stub: - query = case_insensitive_filter(query, FoodSecurityView.dataset_hdx_provider_stub, dataset_hdx_provider_stub) - if resource_update_date_min: - query = query.where(FoodSecurityView.resource_update_date >= resource_update_date_min) - if resource_update_date_max: - query = query.where(FoodSecurityView.resource_update_date < resource_update_date_max) - if location_code: - query = case_insensitive_filter(query, FoodSecurityView.location_code, location_code) - if location_name: - query = query.where(FoodSecurityView.location_name.icontains(location_name)) - if admin1_name: - query = query.where(FoodSecurityView.admin1_name.icontains(admin1_name)) - if admin1_code: - query = case_insensitive_filter(query, FoodSecurityView.admin1_code, admin1_code) - if admin1_is_unspecified is not None: - query = query.where(FoodSecurityView.admin1_is_unspecified == admin1_is_unspecified) - if admin2_code: - query = case_insensitive_filter(query, FoodSecurityView.admin2_code, admin2_code) - if admin2_name: - query = query.where(FoodSecurityView.admin2_name.icontains(admin2_name)) - if admin2_is_unspecified is not None: - query = query.where(FoodSecurityView.admin2_is_unspecified == admin2_is_unspecified) - + if ipc_phase: + query = query.where(FoodSecurityView.ipc_phase == ipc_phase) + if ipc_type: + query = query.where(FoodSecurityView.ipc_type == ipc_type) + + query = apply_location_admin_filter( + query, + FoodSecurityView, + location_ref, + location_code, + location_name, + admin1_ref, + admin1_code, + admin1_name, + admin1_is_unspecified, + admin2_ref, + admin2_code, + admin2_name, + admin2_is_unspecified, + ) + + query = apply_reference_period_filter(query, ref_period_parameters, FoodSecurityView) query = apply_pagination(query, pagination_parameters) result = await db.execute(query) food_security = result.scalars().all() - return food_security \ No newline at end of file + return food_security diff --git a/hdx_hapi/db/dao/funding_view_dao.py b/hdx_hapi/db/dao/funding_view_dao.py new file mode 100644 index 00000000..c0805614 --- /dev/null +++ b/hdx_hapi/db/dao/funding_view_dao.py @@ -0,0 +1,57 @@ +import logging +from typing import Optional, Sequence + +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select + +from hdx_hapi.db.models.views.all_views import FundingView +from hdx_hapi.db.dao.util.util import apply_pagination, apply_reference_period_filter, case_insensitive_filter +from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters + + +logger = logging.getLogger(__name__) + + +async def funding_view_list( + pagination_parameters: PaginationParams, + ref_period_parameters: ReferencePeriodParameters, + db: AsyncSession, + appeal_code: Optional[str] = None, + appeal_type: Optional[str] = None, + org_acronym: Optional[str] = None, + org_name: Optional[str] = None, + sector_name: Optional[str] = None, + # location_ref: Optional[int] = None, + location_code: Optional[str] = None, + location_name: Optional[str] = None, +) -> Sequence[FundingView]: + query = select(FundingView) + if org_acronym: + query = case_insensitive_filter(query, FundingView.org_acronym, org_acronym) + if org_name: + query = query.where(FundingView.org_name.icontains(org_name)) + if sector_name: + query = query.where(FundingView.sector_name.icontains(sector_name)) + # if location_ref: + # query = query.where(FundingView.location_ref == location_ref) + if location_code: + query = case_insensitive_filter(query, FundingView.location_code, location_code) + if 
location_name: + query = query.where(FundingView.location_name.icontains(location_name)) + if appeal_code: + query = case_insensitive_filter(query, FundingView.appeal_code, appeal_code) + if appeal_type: + query = case_insensitive_filter(query, FundingView.appeal_type, appeal_type) + + query = apply_reference_period_filter(query, ref_period_parameters, FundingView) + + query = apply_pagination(query, pagination_parameters) + + logger.debug(f'Executing SQL query: {query}') + + result = await db.execute(query) + funding = result.scalars().all() + + logger.info(f'Retrieved {len(funding)} rows from the database') + + return funding diff --git a/hdx_hapi/db/dao/gender_view_dao.py b/hdx_hapi/db/dao/gender_view_dao.py deleted file mode 100644 index ca385048..00000000 --- a/hdx_hapi/db/dao/gender_view_dao.py +++ /dev/null @@ -1,37 +0,0 @@ -import logging - -from typing import Dict - -from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy import select - -from hdx_hapi.db.models.views.db_gender_view import GenderView -from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter - -logger = logging.getLogger(__name__) - -async def genders_view_list( - pagination_parameters: Dict, - db: AsyncSession, - code: str = None, - description: str = None, -): - - logger.info(f'genders_view_list called with params: code={code}, description={description}') - - query = select(GenderView) - if code: - query = case_insensitive_filter(query, GenderView.code, code) - if description: - query = query.where(GenderView.description.icontains(description)) - - query = apply_pagination(query, pagination_parameters) - - logger.debug(f'Executing SQL query: {query}') - - result = await db.execute(query) - genders = result.scalars().all() - - logger.info(f'Retrieved {len(genders)} rows from the database') - - return genders \ No newline at end of file diff --git a/hdx_hapi/db/dao/humanitarian_needs_view_dao.py b/hdx_hapi/db/dao/humanitarian_needs_view_dao.py index 3706729d..0a0d3d80 100644 --- a/hdx_hapi/db/dao/humanitarian_needs_view_dao.py +++ b/hdx_hapi/db/dao/humanitarian_needs_view_dao.py @@ -1,79 +1,82 @@ -import datetime -from typing import Dict +from typing import Optional from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy import select -from hdx_hapi.db.models.views.db_humanitarian_needs_view import HumanitarianNeedsView -from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter +from hdx_hapi.db.models.views.all_views import HumanitarianNeedsView +from hdx_hapi.db.dao.util.util import ( + apply_location_admin_filter, + apply_pagination, + apply_reference_period_filter, +) +from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters +from hapi_schema.utils.enums import DisabledMarker, Gender, PopulationGroup, PopulationStatus async def humanitarian_needs_view_list( - pagination_parameters: Dict, + pagination_parameters: PaginationParams, + ref_period_parameters: ReferencePeriodParameters, db: AsyncSession, - gender_code: str = None, - age_range_code: str = None, - disabled_marker: bool = None, - sector_code: str = None, - sector_name: str = None, - population_group_code: str = None, - population_status_code: str = None, - population: int = None, - dataset_hdx_provider_stub: str = None, - resource_update_date_min: datetime = None, - resource_update_date_max: datetime = None, - location_code: str = None, - location_name: str = None, - admin1_code: str = None, - # admin1_name: str = None, - admin1_is_unspecified: bool = None, - 
admin2_code: str = None, - admin2_name: str = None, - admin2_is_unspecified: bool = None, + admin2_ref: Optional[int] = None, + gender: Optional[Gender] = None, + age_range: Optional[str] = None, + disabled_marker: Optional[DisabledMarker] = None, + sector_code: Optional[str] = None, + population_group: Optional[PopulationGroup] = None, + population_status: Optional[PopulationStatus] = None, + population_min: Optional[int] = None, + population_max: Optional[int] = None, + sector_name: Optional[str] = None, + location_code: Optional[str] = None, + location_name: Optional[str] = None, + location_ref: Optional[int] = None, + admin1_code: Optional[str] = None, + admin2_code: Optional[str] = None, + admin2_name: Optional[str] = None, + admin1_ref: Optional[int] = None, + admin1_name: Optional[str] = None, + admin1_is_unspecified: Optional[bool] = None, + admin2_is_unspecified: Optional[bool] = None, ): - query = select(HumanitarianNeedsView) - if gender_code: - query = case_insensitive_filter(query, HumanitarianNeedsView.gender_code, gender_code) - if age_range_code: - query = query.where(HumanitarianNeedsView.age_range_code == age_range_code) + if gender: + query = query.where(HumanitarianNeedsView.gender == gender) + if age_range: + query = query.where(HumanitarianNeedsView.age_range == age_range) if disabled_marker: query = query.where(HumanitarianNeedsView.disabled_marker == disabled_marker) if sector_code: query = query.where(HumanitarianNeedsView.sector_code.icontains(sector_code)) + if population_group: + query = query.where(HumanitarianNeedsView.population_group == population_group) + if population_status: + query = query.where(HumanitarianNeedsView.population_status == population_status) + + if population_min: + query = query.where(HumanitarianNeedsView.population >= population_min) + if population_max: + query = query.where(HumanitarianNeedsView.population < population_max) if sector_name: query = query.where(HumanitarianNeedsView.sector_name.icontains(sector_name)) - if population_group_code: - query = query.where(HumanitarianNeedsView.population_group_code.icontains(population_group_code)) - if population_status_code: - query = query.where(HumanitarianNeedsView.population_status_code.icontains(population_status_code)) - if population: - query = query.where(HumanitarianNeedsView.population == population) - if dataset_hdx_provider_stub: - query = case_insensitive_filter( - query, HumanitarianNeedsView.dataset_hdx_provider_stub, dataset_hdx_provider_stub - ) - if resource_update_date_min: - query = query.where(HumanitarianNeedsView.resource_update_date >= resource_update_date_min) - if resource_update_date_max: - query = query.where(HumanitarianNeedsView.resource_update_date < resource_update_date_max) - if location_code: - query = case_insensitive_filter(query, HumanitarianNeedsView.location_code, location_code) - if location_name: - query = query.where(HumanitarianNeedsView.location_name.icontains(location_name)) - if admin1_code: - query = case_insensitive_filter(query, HumanitarianNeedsView.admin1_code, admin1_code) - # if admin1_name: - # query = query.where(HumanitarianNeedsView.admin1_name.icontains(admin1_name)) - if admin1_is_unspecified is not None: - query = query.where(HumanitarianNeedsView.admin1_is_unspecified == admin1_is_unspecified) - if admin2_code: - query = case_insensitive_filter(query, HumanitarianNeedsView.admin2_code, admin2_code) - if admin2_name: - query = query.where(HumanitarianNeedsView.admin2_name.icontains(admin2_name)) - if admin2_is_unspecified is not 
None:
-        query = query.where(HumanitarianNeedsView.admin2_is_unspecified == admin2_is_unspecified)
+
+    query = apply_location_admin_filter(
+        query,
+        HumanitarianNeedsView,
+        location_ref,
+        location_code,
+        location_name,
+        admin1_ref,
+        admin1_code,
+        admin1_name,
+        admin1_is_unspecified,
+        admin2_ref,
+        admin2_code,
+        admin2_name,
+        admin2_is_unspecified,
+    )
+
+    query = apply_reference_period_filter(query, ref_period_parameters, HumanitarianNeedsView)
 
     query = apply_pagination(query, pagination_parameters)
diff --git a/hdx_hapi/db/dao/location_view_dao.py b/hdx_hapi/db/dao/location_view_dao.py
index dc3c14c2..2700b01f 100644
--- a/hdx_hapi/db/dao/location_view_dao.py
+++ b/hdx_hapi/db/dao/location_view_dao.py
@@ -1,22 +1,22 @@
 import logging
 
-from typing import Dict
-
 from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy import select
 
-from hdx_hapi.db.models.views.db_location_view import LocationView
-from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter
+from hdx_hapi.db.models.views.all_views import LocationView
+from hdx_hapi.db.dao.util.util import apply_pagination, apply_reference_period_filter, case_insensitive_filter
+from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters
 
 logger = logging.getLogger(__name__)
 
+
 async def locations_view_list(
-    pagination_parameters: Dict,
+    pagination_parameters: PaginationParams,
+    ref_period_parameters: ReferencePeriodParameters,
     db: AsyncSession,
     code: str = None,
     name: str = None,
 ):
-
-    logger.info(f'orgs_view_list called with params: code={code}, name={name}')
+    logger.info(f'locations_view_list called with params: code={code}, name={name}')
 
     query = select(LocationView)
@@ -25,6 +25,8 @@ async def locations_view_list(
     if name:
         query = query.where(LocationView.name.icontains(name))
 
+    query = apply_reference_period_filter(query, ref_period_parameters, LocationView)
+
     query = apply_pagination(query, pagination_parameters)
 
     logger.debug(f'Executing SQL query: {query}')
@@ -34,4 +36,4 @@ async def locations_view_list(
 
     logger.info(f'Retrieved {len(locations)} rows from the database')
 
-    return locations
\ No newline at end of file
+    return locations
diff --git a/hdx_hapi/db/dao/national_risk_view_dao.py b/hdx_hapi/db/dao/national_risk_view_dao.py
index c09ebb44..4a981935 100644
--- a/hdx_hapi/db/dao/national_risk_view_dao.py
+++ b/hdx_hapi/db/dao/national_risk_view_dao.py
@@ -1,57 +1,67 @@
-import datetime
-from typing import Dict
+from typing import Optional
 
+from hapi_schema.utils.enums import RiskClass
 from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy import select
 
-from hdx_hapi.db.models.views.db_national_risk_view import NationalRiskView
-from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter
+from hdx_hapi.db.models.views.all_views import NationalRiskView
+from hdx_hapi.db.dao.util.util import apply_pagination, apply_reference_period_filter, case_insensitive_filter
+from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters
+
+
 async def national_risks_view_list(
-    pagination_parameters: Dict,
+    pagination_parameters: PaginationParams,
+    ref_period_parameters: ReferencePeriodParameters,
     db: AsyncSession,
-    risk_class: int = None,
-    global_rank: int = None,
-    overall_risk: float = None,
-    hazard_exposure_risk: float = None,
-    vulnerability_risk: float = None,
-    coping_capacity_risk: float = None,
-    dataset_hdx_provider_stub: str = None,
-    resource_update_date_min: datetime = None,
-    resource_update_date_max: datetime = None,
-    # sector_name: str = None,
-    location_code: str = None,
-    location_name:
str = None, + risk_class: Optional[RiskClass] = None, + global_rank_min: Optional[int] = None, + global_rank_max: Optional[int] = None, + overall_risk_min: Optional[float] = None, + overall_risk_max: Optional[float] = None, + hazard_exposure_risk_min: Optional[float] = None, + hazard_exposure_risk_max: Optional[float] = None, + vulnerability_risk_min: Optional[float] = None, + vulnerability_risk_max: Optional[float] = None, + coping_capacity_risk_min: Optional[float] = None, + coping_capacity_risk_max: Optional[float] = None, + location_code: Optional[str] = None, + location_name: Optional[str] = None, ): - query = select(NationalRiskView) if risk_class: query = query.where(NationalRiskView.risk_class == risk_class) - if global_rank: - query = query.where(NationalRiskView.global_rank == global_rank) - if overall_risk: - query = query.where(NationalRiskView.overall_risk == overall_risk) - if hazard_exposure_risk: - query = query.where(NationalRiskView.hazard_exposure_risk == hazard_exposure_risk) - if vulnerability_risk: - query = query.where(NationalRiskView.vulnerability_risk == vulnerability_risk) - if coping_capacity_risk: - query = query.where(NationalRiskView.coping_capacity_risk == coping_capacity_risk) - if dataset_hdx_provider_stub: - query = case_insensitive_filter(query, NationalRiskView.dataset_hdx_provider_stub, dataset_hdx_provider_stub) - if resource_update_date_min: - query = query.where(NationalRiskView.resource_update_date >= resource_update_date_min) - if resource_update_date_max: - query = query.where(NationalRiskView.resource_update_date < resource_update_date_max) + if global_rank_min: + query = query.where(NationalRiskView.global_rank >= global_rank_min) + if global_rank_max: + query = query.where(NationalRiskView.global_rank < global_rank_max) + if overall_risk_min: + query = query.where(NationalRiskView.overall_risk >= overall_risk_min) + if overall_risk_max: + query = query.where(NationalRiskView.overall_risk < overall_risk_max) + if hazard_exposure_risk_min: + query = query.where(NationalRiskView.hazard_exposure_risk >= hazard_exposure_risk_min) + if hazard_exposure_risk_max: + query = query.where(NationalRiskView.hazard_exposure_risk < hazard_exposure_risk_max) + if vulnerability_risk_min: + query = query.where(NationalRiskView.vulnerability_risk >= vulnerability_risk_min) + if vulnerability_risk_max: + query = query.where(NationalRiskView.vulnerability_risk < vulnerability_risk_max) + if coping_capacity_risk_min: + query = query.where(NationalRiskView.coping_capacity_risk >= coping_capacity_risk_min) + if coping_capacity_risk_max: + query = query.where(NationalRiskView.coping_capacity_risk < coping_capacity_risk_max) + # if sector_name: - # query = query.where(NationalRiskView.sector_name.icontains(sector_name)) + # query = query.where(NationalRiskView.sector_name.icontains(sector_name)) if location_code: query = case_insensitive_filter(query, NationalRiskView.location_code, location_code) if location_name: query = query.where(NationalRiskView.location_name.icontains(location_name)) + query = apply_reference_period_filter(query, ref_period_parameters, NationalRiskView) + query = apply_pagination(query, pagination_parameters) result = await db.execute(query) national_risks = result.scalars().all() - return national_risks \ No newline at end of file + return national_risks diff --git a/hdx_hapi/db/dao/operational_presence_view_dao.py b/hdx_hapi/db/dao/operational_presence_view_dao.py index ff8f5490..578ae163 100644 --- 
a/hdx_hapi/db/dao/operational_presence_view_dao.py +++ b/hdx_hapi/db/dao/operational_presence_view_dao.py @@ -1,79 +1,101 @@ import logging -from datetime import datetime -from typing import Dict +from typing import Optional, Sequence from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy import select -from hdx_hapi.db.models.views.db_operational_presence_view import OperationalPresenceView -from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter +from hdx_hapi.db.models.views.all_views import OperationalPresenceView +from hdx_hapi.db.dao.util.util import ( + apply_location_admin_filter, + apply_pagination, + apply_reference_period_filter, + case_insensitive_filter, +) +from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters logger = logging.getLogger(__name__) + async def operational_presences_view_list( - pagination_parameters: Dict, + pagination_parameters: PaginationParams, + ref_period_parameters: ReferencePeriodParameters, db: AsyncSession, - sector_code: str = None, - dataset_hdx_provider_stub: str = None, - resource_update_date_min: datetime = None, - resource_update_date_max: datetime = None, - org_acronym: str = None, - org_name: str = None, - sector_name: str = None, - location_code: str = None, - location_name: str = None, - admin1_code: str = None, - admin1_name: str = None, - admin1_is_unspecified: bool = None, - admin2_code: str = None, - admin2_name: str = None, - admin2_is_unspecified: bool = None, - -): - + sector_code: Optional[str] = None, + # dataset_hdx_provider_stub: str = None, + # resource_update_date_min: datetime = None, + # resource_update_date_max: datetime = None, + # hapi_updated_date_min: datetime = None, + # hapi_updated_date_max: datetime = None, + # hapi_replaced_date_min: datetime = None, + # hapi_replaced_date_max: datetime = None, + org_acronym: Optional[str] = None, + org_name: Optional[str] = None, + sector_name: Optional[str] = None, + location_code: Optional[str] = None, + location_name: Optional[str] = None, + admin1_ref: Optional[int] = None, + admin1_code: Optional[str] = None, + admin1_name: Optional[str] = None, + admin1_is_unspecified: Optional[bool] = None, + location_ref: Optional[int] = None, + admin2_ref: Optional[int] = None, + admin2_code: Optional[str] = None, + admin2_name: Optional[str] = None, + admin2_is_unspecified: Optional[bool] = None, +) -> Sequence[OperationalPresenceView]: logger.info( - f'operational_presences_view_list called with params: sector_code={sector_code}, ' \ - f'dataset_hdx_provider_stub={dataset_hdx_provider_stub}, resource_update_date_min={resource_update_date_min}, '\ - f'resource_update_date_max={resource_update_date_max}, org_acronym={org_acronym}, org_name={org_name}, ' \ - f'sector_name={sector_name}, location_code={location_code}, location_name={location_name}, ' \ - f'admin1_code={admin1_code}, admin1_name={admin1_name}, admin1_is_unspecified={admin1_is_unspecified}, ' \ - f'admin2_code={admin2_code}, admin2_name={admin2_name}, admin2_is_unspecified={admin2_is_unspecified}' + f'operational_presences_view_list called with params: sector_code={sector_code}, ' + f'org_acronym={org_acronym}, org_name={org_name}, ' + f'sector_name={sector_name}, location_code={location_code}, location_name={location_name}, ' + f'admin1_code={admin1_code}, admin1_name={admin1_name}, admin1_is_unspecified={admin1_is_unspecified}, ' + f'admin2_code={admin2_code}, admin2_name={admin2_name}, admin2_is_unspecified={admin2_is_unspecified}, ' + 
f'ref_period_parameters={ref_period_parameters}' ) query = select(OperationalPresenceView) - if sector_code: - query = query.where(OperationalPresenceView.sector_code.icontains(sector_code)) - if dataset_hdx_provider_stub: - query = case_insensitive_filter( - query, OperationalPresenceView.dataset_hdx_provider_stub, dataset_hdx_provider_stub - ) - if resource_update_date_min: - query = query.where(OperationalPresenceView.resource_update_date >= resource_update_date_min) - if resource_update_date_max: - query = query.where(OperationalPresenceView.resource_update_date < resource_update_date_max) + # if dataset_hdx_provider_stub: + # query = case_insensitive_filter( + # query, OperationalPresenceView.dataset_hdx_provider_stub, dataset_hdx_provider_stub + # ) + # if resource_update_date_min: + # query = query.where(OperationalPresenceView.resource_update_date >= resource_update_date_min) + # if resource_update_date_max: + # query = query.where(OperationalPresenceView.resource_update_date < resource_update_date_max) + # if hapi_updated_date_min: + # query = query.where(OperationalPresenceView.hapi_updated_date >= hapi_updated_date_min) + # if hapi_updated_date_max: + # query = query.where(OperationalPresenceView.hapi_updated_date < hapi_updated_date_max) + # if hapi_replaced_date_min: + # query = query.where(OperationalPresenceView.hapi_replaced_date >= hapi_replaced_date_min) + # if hapi_replaced_date_max: + # query = query.where(OperationalPresenceView.hapi_replaced_date < hapi_replaced_date_max) if org_acronym: query = case_insensitive_filter(query, OperationalPresenceView.org_acronym, org_acronym) if org_name: query = query.where(OperationalPresenceView.org_name.icontains(org_name)) + if sector_code: + query = query.where(OperationalPresenceView.sector_code.icontains(sector_code)) if sector_name: query = query.where(OperationalPresenceView.sector_name.icontains(sector_name)) - if location_code: - query = case_insensitive_filter(query, OperationalPresenceView.location_code, location_code) - if location_name: - query = query.where(OperationalPresenceView.location_name.icontains(location_name)) - if admin1_code: - query = case_insensitive_filter(query, OperationalPresenceView.admin1_code, admin1_code) - if admin1_name: - query = query.where(OperationalPresenceView.admin1_name.icontains(admin1_name)) - if admin2_code: - query = case_insensitive_filter(query, OperationalPresenceView.admin2_code, admin2_code) - if admin2_name: - query = query.where(OperationalPresenceView.admin2_name.icontains(admin2_name)) - if admin1_is_unspecified is not None: - query = query.where(OperationalPresenceView.admin1_is_unspecified == admin1_is_unspecified) - if admin2_is_unspecified is not None: - query = query.where(OperationalPresenceView.admin2_is_unspecified == admin2_is_unspecified) + + query = apply_location_admin_filter( + query, + OperationalPresenceView, + location_ref, + location_code, + location_name, + admin1_ref, + admin1_code, + admin1_name, + admin1_is_unspecified, + admin2_ref, + admin2_code, + admin2_name, + admin2_is_unspecified, + ) + + query = apply_reference_period_filter(query, ref_period_parameters, OperationalPresenceView) query = apply_pagination(query, pagination_parameters) diff --git a/hdx_hapi/db/dao/org_type_view_dao.py b/hdx_hapi/db/dao/org_type_view_dao.py index e7599440..73528117 100644 --- a/hdx_hapi/db/dao/org_type_view_dao.py +++ b/hdx_hapi/db/dao/org_type_view_dao.py @@ -1,22 +1,21 @@ import logging -from typing import Dict - from sqlalchemy.ext.asyncio import AsyncSession from 
sqlalchemy import select -from hdx_hapi.db.models.views.db_org_type_view import OrgTypeView +from hdx_hapi.db.models.views.all_views import OrgTypeView from hdx_hapi.db.dao.util.util import apply_pagination +from hdx_hapi.endpoints.util.util import PaginationParams logger = logging.getLogger(__name__) + async def org_types_view_list( - pagination_parameters: Dict, + pagination_parameters: PaginationParams, db: AsyncSession, code: str = None, description: str = None, ): - logger.info(f'org_types_view_list called with params: code={code}, description={description}') query = select(OrgTypeView) @@ -34,4 +33,4 @@ async def org_types_view_list( logger.info(f'Retrieved {len(org_types)} rows from the database') - return org_types \ No newline at end of file + return org_types diff --git a/hdx_hapi/db/dao/org_view_dao.py b/hdx_hapi/db/dao/org_view_dao.py index ef78e3cc..0147c94e 100644 --- a/hdx_hapi/db/dao/org_view_dao.py +++ b/hdx_hapi/db/dao/org_view_dao.py @@ -1,26 +1,25 @@ import logging -from typing import Dict - from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy import select -from hdx_hapi.db.models.views.db_org_view import OrgView +from hdx_hapi.db.models.views.all_views import OrgView from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter +from hdx_hapi.endpoints.util.util import PaginationParams logger = logging.getLogger(__name__) + async def orgs_view_list( - pagination_parameters: Dict, + pagination_parameters: PaginationParams, db: AsyncSession, acronym: str = None, name: str = None, org_type_code: str = None, org_type_description: str = None, ): - logger.info( - f'orgs_view_list called with params: acronym={acronym}, name={name}, org_type_code={org_type_code}, ' \ + f'orgs_view_list called with params: acronym={acronym}, name={name}, org_type_code={org_type_code}, ' f'org_type_description={org_type_description}' ) @@ -43,4 +42,4 @@ async def orgs_view_list( logger.info(f'Retrieved {len(orgs)} rows from the database') - return orgs \ No newline at end of file + return orgs diff --git a/hdx_hapi/db/dao/population_group_view_dao.py b/hdx_hapi/db/dao/population_group_view_dao.py deleted file mode 100644 index b6b04312..00000000 --- a/hdx_hapi/db/dao/population_group_view_dao.py +++ /dev/null @@ -1,37 +0,0 @@ -import logging - -from typing import Dict - -from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy import select - -from hdx_hapi.db.models.views.db_population_group_view import PopulationGroupView -from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter - -logger = logging.getLogger(__name__) - -async def population_groups_view_list( - pagination_parameters: Dict, - db: AsyncSession, - code: str = None, - description: str = None, -): - - logger.info(f'population_groups_view_list called with params: code={code}, description={description}') - - query = select(PopulationGroupView) - if code: - query = case_insensitive_filter(query, PopulationGroupView.code, code) - if description: - query = query.where(PopulationGroupView.description.icontains(description)) - - query = apply_pagination(query, pagination_parameters) - - logger.debug(f'Executing SQL query: {query}') - - result = await db.execute(query) - population_groups = result.scalars().all() - - logger.info(f'Retrieved {len(population_groups)} rows from the database') - - return population_groups \ No newline at end of file diff --git a/hdx_hapi/db/dao/population_status_view_dao.py b/hdx_hapi/db/dao/population_status_view_dao.py deleted file mode 100644 
index 48425d6c..00000000 --- a/hdx_hapi/db/dao/population_status_view_dao.py +++ /dev/null @@ -1,37 +0,0 @@ -import logging - -from typing import Dict - -from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy import select - -from hdx_hapi.db.models.views.db_population_status_view import PopulationStatusView -from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter - -logger = logging.getLogger(__name__) - -async def population_statuses_view_list( - pagination_parameters: Dict, - db: AsyncSession, - code: str = None, - description: str = None, -): - - logger.info(f'population_statuses_view_list called with params: code={code}, description={description}') - - query = select(PopulationStatusView) - if code: - query = case_insensitive_filter(query, PopulationStatusView.code, code) - if description: - query = query.where(PopulationStatusView.description.icontains(description)) - - query = apply_pagination(query, pagination_parameters) - - logger.debug(f'Executing SQL query: {query}') - - result = await db.execute(query) - population_statuses = result.scalars().all() - - logger.info(f'Retrieved {len(population_statuses)} rows from the database') - - return population_statuses \ No newline at end of file diff --git a/hdx_hapi/db/dao/population_view_dao.py b/hdx_hapi/db/dao/population_view_dao.py index a8927ffc..22fc4878 100644 --- a/hdx_hapi/db/dao/population_view_dao.py +++ b/hdx_hapi/db/dao/population_view_dao.py @@ -1,81 +1,86 @@ import logging -import datetime -from typing import Dict +from typing import Optional, Sequence from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy import select -from hdx_hapi.db.models.views.db_population_view import PopulationView -from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter +from hapi_schema.utils.enums import Gender + +from hdx_hapi.db.models.views.all_views import PopulationView +from hdx_hapi.db.dao.util.util import ( + apply_location_admin_filter, + apply_pagination, + apply_reference_period_filter, + case_insensitive_filter, +) +from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters logger = logging.getLogger(__name__) + async def populations_view_list( - pagination_parameters: Dict, + pagination_parameters: PaginationParams, + ref_period_parameters: ReferencePeriodParameters, db: AsyncSession, - gender_code: str = None, - age_range_code: str = None, - population: int = None, - dataset_hdx_provider_stub: str = None, - resource_update_date_min: datetime = None, - resource_update_date_max: datetime = None, - location_code: str = None, - location_name: str = None, - admin1_name: str = None, - admin1_code: str = None, - admin1_is_unspecified: bool = None, - admin2_code: str = None, - admin2_name: str = None, - admin2_is_unspecified: bool = None, -): - + gender: Optional[Gender] = None, + age_range: Optional[str] = None, + population_min: Optional[int] = None, + population_max: Optional[int] = None, + location_code: Optional[str] = None, + location_name: Optional[str] = None, + admin1_ref: Optional[int] = None, + admin1_code: Optional[str] = None, + admin1_name: Optional[str] = None, + admin1_is_unspecified: Optional[bool] = None, + location_ref: Optional[int] = None, + admin2_ref: Optional[int] = None, + admin2_code: Optional[str] = None, + admin2_name: Optional[str] = None, + admin2_is_unspecified: Optional[bool] = None, +) -> Sequence[PopulationView]: logger.info( - f'populations_view_list called with params: gender_code={gender_code}, 
age_range_code={age_range_code}, ' \
-        f'population={population}, dataset_hdx_provider_stub={dataset_hdx_provider_stub}, ' \
-        f'resource_update_date_min={resource_update_date_min}, resource_update_date_max={resource_update_date_max}, ' \
-        f'location_code={location_code}, location_name={location_name}, admin1_name={admin1_name}, ' \
-        f'admin1_code={admin1_code}, admin1_is_unspecified={admin1_is_unspecified}, admin2_code={admin2_code}, ' \
+        f'populations_view_list called with params: gender={gender}, age_range={age_range}, '
+        f'population_min={population_min}, population_max={population_max}, '
+        f'location_code={location_code}, location_name={location_name}, admin1_name={admin1_name}, '
+        f'admin1_code={admin1_code}, admin1_is_unspecified={admin1_is_unspecified}, admin2_code={admin2_code}, '
         f'admin2_name={admin2_name}, admin2_is_unspecified={admin2_is_unspecified}'
+        f', ref_period_parameters={ref_period_parameters}'
     )
 
     query = select(PopulationView)
-    if gender_code:
-        query = case_insensitive_filter(query, PopulationView.gender_code, gender_code)
-    if age_range_code:
-        query = query.where(PopulationView.age_range_code == age_range_code)
-    if population:
-        query = query.where(PopulationView.population == population)
-    if dataset_hdx_provider_stub:
-        query = case_insensitive_filter(query, PopulationView.dataset_hdx_provider_stub, dataset_hdx_provider_stub)
-    if resource_update_date_min:
-        query = query.where(PopulationView.resource_update_date >= resource_update_date_min)
-    if resource_update_date_max:
-        query = query.where(PopulationView.resource_update_date < resource_update_date_max)
-    if location_code:
-        query = case_insensitive_filter(query, PopulationView.location_code, location_code)
-    if location_name:
-        query = query.where(PopulationView.location_name.icontains(location_name))
-    if admin1_name:
-        query = query.where(PopulationView.admin1_name.icontains(admin1_name))
-    if admin1_code:
-        query = case_insensitive_filter(query, PopulationView.admin1_code, admin1_code)
-    if admin1_is_unspecified is not None:
-        query = query.where(PopulationView.admin1_is_unspecified == admin1_is_unspecified)
-    if admin2_code:
-        query = case_insensitive_filter(query, PopulationView.admin2_code, admin2_code)
-    if admin2_name:
-        query = query.where(PopulationView.admin2_name.icontains(admin2_name))
-    if admin2_is_unspecified is not None:
-        query = query.where(PopulationView.admin2_is_unspecified == admin2_is_unspecified)
+    if gender:
+        query = query.where(PopulationView.gender == gender)
+    if age_range:
+        query = case_insensitive_filter(query, PopulationView.age_range, age_range)
+    if population_min:
+        query = query.where(PopulationView.population >= population_min)
+    if population_max:
+        query = query.where(PopulationView.population < population_max)
+    query = apply_location_admin_filter(
+        query,
+        PopulationView,
+        location_ref,
+        location_code,
+        location_name,
+        admin1_ref,
+        admin1_code,
+        admin1_name,
+        admin1_is_unspecified,
+        admin2_ref,
+        admin2_code,
+        admin2_name,
+        admin2_is_unspecified,
+    )
 
-    query = apply_pagination(query, pagination_parameters)
+    query = apply_reference_period_filter(query, ref_period_parameters, PopulationView)
 
-    logger.debug(f'Executing SQL query: {query}')
+    query = apply_pagination(query, pagination_parameters)
 
+    logger.info(f'Executing SQL query: {query}')
     result = await db.execute(query)
     populations = result.scalars().all()
 
     logger.info(f'Retrieved {len(populations)} rows from the database')
 
-    return populations
\ No newline at end of file
+    return populations
diff
--git a/hdx_hapi/db/dao/poverty_rate_dao.py b/hdx_hapi/db/dao/poverty_rate_dao.py new file mode 100644 index 00000000..0b77ac2e --- /dev/null +++ b/hdx_hapi/db/dao/poverty_rate_dao.py @@ -0,0 +1,49 @@ +from typing import Optional + +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select + +from hdx_hapi.db.models.views.all_views import PovertyRateView +from hdx_hapi.db.dao.util.util import ( + apply_location_admin_filter, + apply_pagination, + apply_reference_period_filter, +) +from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters + + +async def poverty_rates_view_list( + pagination_parameters: PaginationParams, + ref_period_parameters: ReferencePeriodParameters, + db: AsyncSession, + mpi_min: Optional[float] = None, + mpi_max: Optional[float] = None, + location_code: Optional[str] = None, + location_name: Optional[str] = None, + admin1_name: Optional[str] = None, +): + query = select(PovertyRateView) + + if mpi_min: + query = query.where(PovertyRateView.mpi >= mpi_min) + if mpi_max: + query = query.where(PovertyRateView.mpi < mpi_max) + + query = apply_location_admin_filter( + query, + PovertyRateView, + None, + location_code, + location_name, + None, + None, + admin1_name, + ) + + query = apply_reference_period_filter(query, ref_period_parameters, PovertyRateView) + + query = apply_pagination(query, pagination_parameters) + + result = await db.execute(query) + poverty_rates = result.scalars().all() + return poverty_rates diff --git a/hdx_hapi/db/dao/refugees_view_dao.py b/hdx_hapi/db/dao/refugees_view_dao.py new file mode 100644 index 00000000..127d065a --- /dev/null +++ b/hdx_hapi/db/dao/refugees_view_dao.py @@ -0,0 +1,57 @@ +from typing import Optional + +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select + +from hdx_hapi.db.models.views.all_views import RefugeesView +from hdx_hapi.db.dao.util.util import ( + apply_pagination, + apply_reference_period_filter, + case_insensitive_filter, +) +from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters +from hapi_schema.utils.enums import Gender, PopulationGroup + + +async def refugees_view_list( + pagination_parameters: PaginationParams, + ref_period_parameters: ReferencePeriodParameters, + db: AsyncSession, + population_group: Optional[PopulationGroup] = None, + population_min: Optional[int] = None, + population_max: Optional[int] = None, + gender: Optional[Gender] = None, + age_range: Optional[str] = None, + origin_location_code: Optional[str] = None, + origin_location_name: Optional[str] = None, + asylum_location_code: Optional[str] = None, + asylum_location_name: Optional[str] = None, +): + query = select(RefugeesView) + + if gender: + query = query.where(RefugeesView.gender == gender) + if age_range: + query = query.where(RefugeesView.age_range == age_range) + if population_group: + query = query.where(RefugeesView.population_group == population_group) + if population_min: + query = query.where(RefugeesView.population >= population_min) + if population_max: + query = query.where(RefugeesView.population < population_max) + if origin_location_code: + query = case_insensitive_filter(query, RefugeesView.origin_location_code, origin_location_code) + if origin_location_name: + query = query.where(RefugeesView.origin_location_name.icontains(origin_location_name)) + if asylum_location_code: + query = case_insensitive_filter(query, RefugeesView.asylum_location_code, asylum_location_code) + if asylum_location_name: + query = 
query.where(RefugeesView.asylum_location_name.icontains(asylum_location_name)) + + query = apply_reference_period_filter(query, ref_period_parameters, RefugeesView) + + query = apply_pagination(query, pagination_parameters) + + result = await db.execute(query) + refugees = result.scalars().all() + return refugees diff --git a/hdx_hapi/db/dao/resource_view_dao.py b/hdx_hapi/db/dao/resource_view_dao.py index 115738aa..709cef18 100644 --- a/hdx_hapi/db/dao/resource_view_dao.py +++ b/hdx_hapi/db/dao/resource_view_dao.py @@ -1,29 +1,31 @@ -from typing import Dict from datetime import datetime from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy import select -from hdx_hapi.db.models.views.db_resource_view import ResourceView +from hdx_hapi.db.models.views.all_views import ResourceView from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter +from hdx_hapi.endpoints.util.util import PaginationParams + async def resources_view_list( - pagination_parameters: Dict, + pagination_parameters: PaginationParams, db: AsyncSession, - hdx_id: str = None, + resource_hdx_id: str = None, format: str = None, update_date_min: datetime = None, update_date_max: datetime = None, is_hxl: bool = None, - dataset_title: str = None, + hapi_updated_date_min: datetime = None, + hapi_updated_date_max: datetime = None, + dataset_hdx_title: str = None, dataset_hdx_id: str = None, dataset_hdx_stub: str = None, dataset_hdx_provider_stub: str = None, dataset_hdx_provider_name: str = None, ): - query = select(ResourceView) - if hdx_id: - query = query.where(ResourceView.hdx_id == hdx_id) + if resource_hdx_id: + query = query.where(ResourceView.resource_hdx_id == resource_hdx_id) if format: query = query.where(ResourceView.format == format) if update_date_min: @@ -32,8 +34,12 @@ async def resources_view_list( query = query.where(ResourceView.update_date < update_date_max) if is_hxl is not None: query = query.where(ResourceView.is_hxl == is_hxl) - if dataset_title: - query = query.where(ResourceView.dataset_title == dataset_title) + if hapi_updated_date_min: + query = query.where(ResourceView.hapi_updated_date >= hapi_updated_date_min) + if hapi_updated_date_max: + query = query.where(ResourceView.hapi_updated_date < hapi_updated_date_max) + if dataset_hdx_title: + query = query.where(ResourceView.dataset_hdx_title.icontains(dataset_hdx_title)) if dataset_hdx_id: query = query.where(ResourceView.dataset_hdx_id == dataset_hdx_id) if dataset_hdx_stub: @@ -47,4 +53,4 @@ async def resources_view_list( result = await db.execute(query) resources = result.scalars().all() - return resources \ No newline at end of file + return resources diff --git a/hdx_hapi/db/dao/sector_view_dao.py b/hdx_hapi/db/dao/sector_view_dao.py index 614c30d9..daa9df06 100644 --- a/hdx_hapi/db/dao/sector_view_dao.py +++ b/hdx_hapi/db/dao/sector_view_dao.py @@ -1,23 +1,22 @@ import logging -from typing import Dict - from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy import select -from hdx_hapi.db.models.views.db_sector_view import SectorView +from hdx_hapi.db.models.views.all_views import SectorView from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter +from hdx_hapi.endpoints.util.util import PaginationParams logger = logging.getLogger(__name__) + async def sectors_view_list( - pagination_parameters: Dict, + pagination_parameters: PaginationParams, db: AsyncSession, code: str = None, name: str = None, ): - logger.info(f'sectors_view_list called with params: code={code}, name={name}') query 
= select(SectorView) @@ -35,4 +34,4 @@ async def sectors_view_list( logger.info(f'Retrieved {len(sectors)} rows from the database') - return sectors \ No newline at end of file + return sectors diff --git a/hdx_hapi/db/dao/util/util.py b/hdx_hapi/db/dao/util/util.py index d42412a3..cc5911ba 100644 --- a/hdx_hapi/db/dao/util/util.py +++ b/hdx_hapi/db/dao/util/util.py @@ -1,9 +1,13 @@ -from typing import Dict -from sqlalchemy import Column, Select +from typing import Optional, Protocol, Type +from sqlalchemy import DateTime, Select +from sqlalchemy.orm import Mapped -def apply_pagination(query: Select, pagination_parameters: Dict) -> Select: - offset = pagination_parameters.get('offset') - limit = pagination_parameters.get('limit') +from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters + + +def apply_pagination(query: Select, pagination_parameters: PaginationParams) -> Select: + offset = pagination_parameters.offset + limit = pagination_parameters.limit if not offset: offset = 0 if not limit: @@ -11,6 +15,86 @@ def apply_pagination(query: Select, pagination_parameters: Dict) -> Select: return query.limit(limit).offset(offset) -def case_insensitive_filter(query: Select, column: Column, value: str) -> Select: + +class EntityWithReferencePeriod(Protocol): + reference_period_start: Mapped[DateTime] + reference_period_end: Mapped[DateTime] + + +def apply_reference_period_filter( + query: Select, + ref_period_parameters: ReferencePeriodParameters, + db_class: Type[EntityWithReferencePeriod], +) -> Select: + if ref_period_parameters is None: + return query + + if ref_period_parameters.reference_period_start_min: + query = query.where(db_class.reference_period_start >= ref_period_parameters.reference_period_start_min) + if ref_period_parameters.reference_period_start_max: + query = query.where(db_class.reference_period_start < ref_period_parameters.reference_period_start_max) + if ref_period_parameters.reference_period_end_min: + query = query.where(db_class.reference_period_end >= ref_period_parameters.reference_period_end_min) + if ref_period_parameters.reference_period_end_max: + query = query.where(db_class.reference_period_end < ref_period_parameters.reference_period_end_max) + return query + + +class EntityWithLocationAdmin(Protocol): + location_ref: Mapped[int] + location_code: Mapped[str] + location_name: Mapped[str] + admin1_ref: Mapped[int] + admin1_code: Mapped[str] + admin1_name: Mapped[str] + admin1_is_unspecified: Mapped[bool] + admin2_ref: Mapped[int] + admin2_code: Mapped[str] + admin2_name: Mapped[str] + admin2_is_unspecified: Mapped[bool] + + +def apply_location_admin_filter( + query: Select, + db_class: Type[EntityWithLocationAdmin], + location_ref: Optional[int] = None, + location_code: Optional[str] = None, + location_name: Optional[str] = None, + admin1_ref: Optional[int] = None, + admin1_code: Optional[str] = None, + admin1_name: Optional[str] = None, + admin1_is_unspecified: Optional[bool] = None, + admin2_ref: Optional[int] = None, + admin2_code: Optional[str] = None, + admin2_name: Optional[str] = None, + admin2_is_unspecified: Optional[bool] = None, +) -> Select: + if location_ref: + query = query.where(db_class.location_ref == location_ref) + if location_code: + query = case_insensitive_filter(query, db_class.location_code, location_code) + if location_name: + query = query.where(db_class.location_name.icontains(location_name)) + if admin1_ref: + query = query.where(db_class.admin1_ref == admin1_ref) + if admin1_code: + query = 
case_insensitive_filter(query, db_class.admin1_code, admin1_code) + if admin1_name: + query = query.where(db_class.admin1_name.icontains(admin1_name)) + if admin2_ref: + query = query.where(db_class.admin2_ref == admin2_ref) + if admin2_code: + query = case_insensitive_filter(query, db_class.admin2_code, admin2_code) + if admin2_name: + query = query.where(db_class.admin2_name.icontains(admin2_name)) + if admin1_is_unspecified is not None: + query = query.where(db_class.admin1_is_unspecified == admin1_is_unspecified) + if admin2_is_unspecified is not None: + query = query.where(db_class.admin2_is_unspecified == admin2_is_unspecified) + + return query + + +def case_insensitive_filter(query: Select, column: Mapped[str], value: str) -> Select: query = query.where(column.ilike(value)) return query diff --git a/hdx_hapi/db/dao/wfp_commodity_view_dao.py b/hdx_hapi/db/dao/wfp_commodity_view_dao.py new file mode 100644 index 00000000..d60009cf --- /dev/null +++ b/hdx_hapi/db/dao/wfp_commodity_view_dao.py @@ -0,0 +1,38 @@ +import logging +from typing import Optional, Sequence + +from hapi_schema.utils.enums import CommodityCategory +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select + +from hdx_hapi.db.models.views.all_views import WfpCommodityView +from hdx_hapi.db.dao.util.util import ( + apply_pagination, + case_insensitive_filter, +) +from hdx_hapi.endpoints.util.util import PaginationParams + + +logger = logging.getLogger(__name__) + + +async def wfp_commodity_view_list( + pagination_parameters: PaginationParams, + db: AsyncSession, + code: Optional[str] = None, + category: Optional[CommodityCategory] = None, + name: Optional[str] = None, +) -> Sequence[WfpCommodityView]: + query = select(WfpCommodityView) + if code: + query = case_insensitive_filter(query, WfpCommodityView.code, code) + if category: + query = query.where(WfpCommodityView.category == category) + if name: + query = query.where(WfpCommodityView.name.icontains(name)) + + query = apply_pagination(query, pagination_parameters) + + result = await db.execute(query) + wfp_commodities = result.scalars().all() + return wfp_commodities diff --git a/hdx_hapi/db/dao/wfp_market_view_dao.py b/hdx_hapi/db/dao/wfp_market_view_dao.py new file mode 100644 index 00000000..6ac6e51f --- /dev/null +++ b/hdx_hapi/db/dao/wfp_market_view_dao.py @@ -0,0 +1,64 @@ +import logging +from typing import Optional, Sequence + +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select + +from hdx_hapi.db.dao.util.util import ( + apply_location_admin_filter, + apply_pagination, + case_insensitive_filter, +) +from hdx_hapi.db.models.views.all_views import WfpMarketView +from hdx_hapi.endpoints.util.util import PaginationParams + + +logger = logging.getLogger(__name__) + + +async def wfp_market_view_list( + pagination_parameters: PaginationParams, + db: AsyncSession, + code: Optional[str] = None, + name: Optional[str] = None, + # lat: Optional[float] = None, + # lon: Optional[float] = None, + location_code: Optional[str] = None, + location_name: Optional[str] = None, + admin1_ref: Optional[int] = None, + admin1_code: Optional[str] = None, + admin1_name: Optional[str] = None, + admin1_is_unspecified: Optional[bool] = None, + location_ref: Optional[int] = None, + admin2_ref: Optional[int] = None, + admin2_code: Optional[str] = None, + admin2_name: Optional[str] = None, + admin2_is_unspecified: Optional[bool] = None, +) -> Sequence[WfpMarketView]: + query = select(WfpMarketView) + if code: + query = 
case_insensitive_filter(query, WfpMarketView.code, code) + if name: + query = query.where(WfpMarketView.name.icontains(name)) + query = apply_location_admin_filter( + query, + WfpMarketView, + location_ref, + location_code, + location_name, + admin1_ref, + admin1_code, + admin1_name, + admin1_is_unspecified, + admin2_ref, + admin2_code, + admin2_name, + admin2_is_unspecified, + ) + + query = apply_pagination(query, pagination_parameters) + + result = await db.execute(query) + wfp_markets = result.scalars().all() + + return wfp_markets diff --git a/hdx_hapi/db/models/views/all_views.py b/hdx_hapi/db/models/views/all_views.py new file mode 100644 index 00000000..98ed64e9 --- /dev/null +++ b/hdx_hapi/db/models/views/all_views.py @@ -0,0 +1,423 @@ +""" +This code was generated automatically using src/hapi_schema/utils/hapi_views_code_generator.py +""" + +from decimal import Decimal +from sqlalchemy import DateTime +from sqlalchemy.orm import column_property, Mapped +from hdx_hapi.db.models.views.util.util import view +from hdx_hapi.db.models.base import Base +from hapi_schema.db_admin1 import view_params_admin1 +from hapi_schema.db_admin2 import view_params_admin2 +from hapi_schema.db_conflict_event import view_params_conflict_event +from hapi_schema.db_currency import view_params_currency +from hapi_schema.db_dataset import view_params_dataset +from hapi_schema.db_food_price import view_params_food_price +from hapi_schema.db_food_security import view_params_food_security +from hapi_schema.db_funding import view_params_funding +from hapi_schema.db_humanitarian_needs import view_params_humanitarian_needs +from hapi_schema.db_location import view_params_location +from hapi_schema.db_national_risk import view_params_national_risk +from hapi_schema.db_operational_presence import view_params_operational_presence +from hapi_schema.db_org_type import view_params_org_type +from hapi_schema.db_org import view_params_org +from hapi_schema.db_population import view_params_population +from hapi_schema.db_poverty_rate import view_params_poverty_rate +from hapi_schema.db_refugees import view_params_refugees +from hapi_schema.db_resource import view_params_resource +from hapi_schema.db_sector import view_params_sector +from hapi_schema.db_wfp_commodity import view_params_wfp_commodity +from hapi_schema.db_wfp_market import view_params_wfp_market +from hapi_schema.db_patch import view_params_patch +from hapi_schema.utils.enums import ( + CommodityCategory, + DisabledMarker, + EventType, + IPCPhase, + IPCType, + PopulationGroup, + PopulationStatus, + PriceFlag, + PriceType, + RiskClass, + Gender, +) + +admin1_view = view(view_params_admin1.name, Base.metadata, view_params_admin1.selectable) +admin2_view = view(view_params_admin2.name, Base.metadata, view_params_admin2.selectable) +conflict_event_view = view(view_params_conflict_event.name, Base.metadata, view_params_conflict_event.selectable) +currency_view = view(view_params_currency.name, Base.metadata, view_params_currency.selectable) +dataset_view = view(view_params_dataset.name, Base.metadata, view_params_dataset.selectable) +food_price_view = view(view_params_food_price.name, Base.metadata, view_params_food_price.selectable) +food_security_view = view(view_params_food_security.name, Base.metadata, view_params_food_security.selectable) +funding_view = view(view_params_funding.name, Base.metadata, view_params_funding.selectable) +humanitarian_needs_view = view( + view_params_humanitarian_needs.name, Base.metadata, view_params_humanitarian_needs.selectable 
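
The generated `all_views.py` module below consolidates the former `hdx_hapi.db.models.views.db_*_view` modules referenced throughout the old imports into one file; each class maps a read-only ORM model over a database view via `column_property`. A minimal query sketch against one of the generated classes (session setup assumed):

```python
# Hedged sketch: the view-backed classes work with ordinary
# SQLAlchemy 2.0-style selects, as the DAOs in this patch do.
from sqlalchemy import select

from hdx_hapi.db.models.views.all_views import Admin1View

stmt = (
    select(Admin1View)
    .where(Admin1View.location_code.ilike('afg'))  # case-insensitive match
    .limit(10)
)

# Within an AsyncSession:
#   result = await session.execute(stmt)
#   admin1_rows = result.scalars().all()
```
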
+) +location_view = view(view_params_location.name, Base.metadata, view_params_location.selectable) +national_risk_view = view(view_params_national_risk.name, Base.metadata, view_params_national_risk.selectable) +operational_presence_view = view( + view_params_operational_presence.name, Base.metadata, view_params_operational_presence.selectable +) +org_type_view = view(view_params_org_type.name, Base.metadata, view_params_org_type.selectable) +org_view = view(view_params_org.name, Base.metadata, view_params_org.selectable) +population_view = view(view_params_population.name, Base.metadata, view_params_population.selectable) +poverty_rate_view = view(view_params_poverty_rate.name, Base.metadata, view_params_poverty_rate.selectable) +refugees_view = view(view_params_refugees.name, Base.metadata, view_params_refugees.selectable) +resource_view = view(view_params_resource.name, Base.metadata, view_params_resource.selectable) +sector_view = view(view_params_sector.name, Base.metadata, view_params_sector.selectable) +wfp_commodity_view = view(view_params_wfp_commodity.name, Base.metadata, view_params_wfp_commodity.selectable) +wfp_market_view = view(view_params_wfp_market.name, Base.metadata, view_params_wfp_market.selectable) +patch_view = view(view_params_patch.name, Base.metadata, view_params_patch.selectable) + + +class Admin1View(Base): + __table__ = admin1_view + id: Mapped[int] = column_property(admin1_view.c.id) + location_ref: Mapped[int] = column_property(admin1_view.c.location_ref) + code: Mapped[str] = column_property(admin1_view.c.code) + name: Mapped[str] = column_property(admin1_view.c.name) + is_unspecified: Mapped[bool] = column_property(admin1_view.c.is_unspecified) + from_cods: Mapped[bool] = column_property(admin1_view.c.from_cods) + reference_period_start: Mapped[DateTime] = column_property(admin1_view.c.reference_period_start) + reference_period_end: Mapped[DateTime] = column_property(admin1_view.c.reference_period_end) + location_code: Mapped[str] = column_property(admin1_view.c.location_code) + location_name: Mapped[str] = column_property(admin1_view.c.location_name) + + +class Admin2View(Base): + __table__ = admin2_view + id: Mapped[int] = column_property(admin2_view.c.id) + admin1_ref: Mapped[int] = column_property(admin2_view.c.admin1_ref) + code: Mapped[str] = column_property(admin2_view.c.code) + name: Mapped[str] = column_property(admin2_view.c.name) + is_unspecified: Mapped[bool] = column_property(admin2_view.c.is_unspecified) + from_cods: Mapped[bool] = column_property(admin2_view.c.from_cods) + reference_period_start: Mapped[DateTime] = column_property(admin2_view.c.reference_period_start) + reference_period_end: Mapped[DateTime] = column_property(admin2_view.c.reference_period_end) + admin1_code: Mapped[str] = column_property(admin2_view.c.admin1_code) + admin1_name: Mapped[str] = column_property(admin2_view.c.admin1_name) + admin1_is_unspecified: Mapped[bool] = column_property(admin2_view.c.admin1_is_unspecified) + location_code: Mapped[str] = column_property(admin2_view.c.location_code) + location_name: Mapped[str] = column_property(admin2_view.c.location_name) + + +class ConflictEventView(Base): + __table__ = conflict_event_view + resource_hdx_id: Mapped[str] = column_property(conflict_event_view.c.resource_hdx_id) + admin2_ref: Mapped[int] = column_property(conflict_event_view.c.admin2_ref) + event_type: Mapped[EventType] = column_property(conflict_event_view.c.event_type) + events: Mapped[int] = column_property(conflict_event_view.c.events) + fatalities: 
Mapped[int] = column_property(conflict_event_view.c.fatalities) + reference_period_start: Mapped[DateTime] = column_property(conflict_event_view.c.reference_period_start) + reference_period_end: Mapped[DateTime] = column_property(conflict_event_view.c.reference_period_end) + location_code: Mapped[str] = column_property(conflict_event_view.c.location_code) + location_name: Mapped[str] = column_property(conflict_event_view.c.location_name) + admin1_code: Mapped[str] = column_property(conflict_event_view.c.admin1_code) + admin1_name: Mapped[str] = column_property(conflict_event_view.c.admin1_name) + admin1_is_unspecified: Mapped[bool] = column_property(conflict_event_view.c.admin1_is_unspecified) + location_ref: Mapped[int] = column_property(conflict_event_view.c.location_ref) + admin2_code: Mapped[str] = column_property(conflict_event_view.c.admin2_code) + admin2_name: Mapped[str] = column_property(conflict_event_view.c.admin2_name) + admin2_is_unspecified: Mapped[bool] = column_property(conflict_event_view.c.admin2_is_unspecified) + admin1_ref: Mapped[int] = column_property(conflict_event_view.c.admin1_ref) + + +class CurrencyView(Base): + __table__ = currency_view + code: Mapped[str] = column_property(currency_view.c.code) + name: Mapped[str] = column_property(currency_view.c.name) + + +class DatasetView(Base): + __table__ = dataset_view + dataset_hdx_id: Mapped[str] = column_property(dataset_view.c.hdx_id) + dataset_hdx_stub: Mapped[str] = column_property(dataset_view.c.hdx_stub) + dataset_hdx_title: Mapped[str] = column_property(dataset_view.c.title) + hdx_provider_stub: Mapped[str] = column_property(dataset_view.c.hdx_provider_stub) + hdx_provider_name: Mapped[str] = column_property(dataset_view.c.hdx_provider_name) + + +class FoodPriceView(Base): + __table__ = food_price_view + resource_hdx_id: Mapped[str] = column_property(food_price_view.c.resource_hdx_id) + market_code: Mapped[str] = column_property(food_price_view.c.market_code) + commodity_code: Mapped[str] = column_property(food_price_view.c.commodity_code) + currency_code: Mapped[str] = column_property(food_price_view.c.currency_code) + unit: Mapped[str] = column_property(food_price_view.c.unit) + price_flag: Mapped[PriceFlag] = column_property(food_price_view.c.price_flag) + price_type: Mapped[PriceType] = column_property(food_price_view.c.price_type) + price: Mapped[Decimal] = column_property(food_price_view.c.price) + reference_period_start: Mapped[DateTime] = column_property(food_price_view.c.reference_period_start) + reference_period_end: Mapped[DateTime] = column_property(food_price_view.c.reference_period_end) + admin2_ref: Mapped[int] = column_property(food_price_view.c.admin2_ref) + market_name: Mapped[str] = column_property(food_price_view.c.market_name) + lat: Mapped[float] = column_property(food_price_view.c.lat) + lon: Mapped[float] = column_property(food_price_view.c.lon) + commodity_category: Mapped[CommodityCategory] = column_property(food_price_view.c.commodity_category) + commodity_name: Mapped[str] = column_property(food_price_view.c.commodity_name) + location_code: Mapped[str] = column_property(food_price_view.c.location_code) + location_name: Mapped[str] = column_property(food_price_view.c.location_name) + location_ref: Mapped[int] = column_property(food_price_view.c.location_ref) + admin1_code: Mapped[str] = column_property(food_price_view.c.admin1_code) + admin1_name: Mapped[str] = column_property(food_price_view.c.admin1_name) + admin1_is_unspecified: Mapped[bool] = 
column_property(food_price_view.c.admin1_is_unspecified) + admin1_ref: Mapped[int] = column_property(food_price_view.c.admin1_ref) + admin2_code: Mapped[str] = column_property(food_price_view.c.admin2_code) + admin2_name: Mapped[str] = column_property(food_price_view.c.admin2_name) + admin2_is_unspecified: Mapped[bool] = column_property(food_price_view.c.admin2_is_unspecified) + + +class FoodSecurityView(Base): + __table__ = food_security_view + resource_hdx_id: Mapped[str] = column_property(food_security_view.c.resource_hdx_id) + admin2_ref: Mapped[int] = column_property(food_security_view.c.admin2_ref) + ipc_phase: Mapped[IPCPhase] = column_property(food_security_view.c.ipc_phase) + ipc_type: Mapped[IPCType] = column_property(food_security_view.c.ipc_type) + population_in_phase: Mapped[int] = column_property(food_security_view.c.population_in_phase) + population_fraction_in_phase: Mapped[float] = column_property(food_security_view.c.population_fraction_in_phase) + reference_period_start: Mapped[DateTime] = column_property(food_security_view.c.reference_period_start) + reference_period_end: Mapped[DateTime] = column_property(food_security_view.c.reference_period_end) + location_code: Mapped[str] = column_property(food_security_view.c.location_code) + location_name: Mapped[str] = column_property(food_security_view.c.location_name) + admin1_code: Mapped[str] = column_property(food_security_view.c.admin1_code) + admin1_name: Mapped[str] = column_property(food_security_view.c.admin1_name) + admin1_is_unspecified: Mapped[bool] = column_property(food_security_view.c.admin1_is_unspecified) + location_ref: Mapped[int] = column_property(food_security_view.c.location_ref) + admin2_code: Mapped[str] = column_property(food_security_view.c.admin2_code) + admin2_name: Mapped[str] = column_property(food_security_view.c.admin2_name) + admin2_is_unspecified: Mapped[bool] = column_property(food_security_view.c.admin2_is_unspecified) + admin1_ref: Mapped[int] = column_property(food_security_view.c.admin1_ref) + + +class FundingView(Base): + __table__ = funding_view + resource_hdx_id: Mapped[str] = column_property(funding_view.c.resource_hdx_id) + appeal_code: Mapped[str] = column_property(funding_view.c.appeal_code) + location_ref: Mapped[int] = column_property(funding_view.c.location_ref) + appeal_name: Mapped[str] = column_property(funding_view.c.appeal_name) + appeal_type: Mapped[str] = column_property(funding_view.c.appeal_type) + requirements_usd: Mapped[Decimal] = column_property(funding_view.c.requirements_usd) + funding_usd: Mapped[Decimal] = column_property(funding_view.c.funding_usd) + funding_pct: Mapped[Decimal] = column_property(funding_view.c.funding_pct) + reference_period_start: Mapped[DateTime] = column_property(funding_view.c.reference_period_start) + reference_period_end: Mapped[DateTime] = column_property(funding_view.c.reference_period_end) + location_code: Mapped[str] = column_property(funding_view.c.location_code) + location_name: Mapped[str] = column_property(funding_view.c.location_name) + + +class HumanitarianNeedsView(Base): + __table__ = humanitarian_needs_view + resource_hdx_id: Mapped[str] = column_property(humanitarian_needs_view.c.resource_hdx_id) + admin2_ref: Mapped[int] = column_property(humanitarian_needs_view.c.admin2_ref) + gender: Mapped[Gender] = column_property(humanitarian_needs_view.c.gender) + age_range: Mapped[str] = column_property(humanitarian_needs_view.c.age_range) + min_age: Mapped[int] = column_property(humanitarian_needs_view.c.min_age) + max_age: 
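Because every class in this generated module maps onto a plain selectable, querying follows the same 2.0-style pattern as the service function at the top of this diff: build a `select()`, add filters, execute on an `AsyncSession`, and read `result.scalars().all()`. A hedged usage sketch follows; the DSN and session wiring are placeholders, since the real factory lives in `hdx_hapi/services/sql_alchemy_session.py`.

```python
# Illustrative wiring only: the engine URL below is a placeholder and the
# real session factory is provided by hdx_hapi.services.sql_alchemy_session.
import asyncio
from typing import Optional

from sqlalchemy import select
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine

from hdx_hapi.db.models.views.all_views import FundingView

engine = create_async_engine('postgresql+asyncpg://user:pass@localhost:5432/hapi')
Session = async_sessionmaker(engine, expire_on_commit=False)


async def list_funding(appeal_code: Optional[str] = None, limit: int = 100):
    async with Session() as session:
        query = select(FundingView)
        if appeal_code:
            query = query.where(FundingView.appeal_code == appeal_code)
        result = await session.execute(query.limit(limit))
        return result.scalars().all()


if __name__ == '__main__':
    asyncio.run(list_funding())
```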
Mapped[int] = column_property(humanitarian_needs_view.c.max_age) + sector_code: Mapped[str] = column_property(humanitarian_needs_view.c.sector_code) + population_group: Mapped[PopulationGroup] = column_property(humanitarian_needs_view.c.population_group) + population_status: Mapped[PopulationStatus] = column_property(humanitarian_needs_view.c.population_status) + disabled_marker: Mapped[DisabledMarker] = column_property(humanitarian_needs_view.c.disabled_marker) + population: Mapped[int] = column_property(humanitarian_needs_view.c.population) + reference_period_start: Mapped[DateTime] = column_property(humanitarian_needs_view.c.reference_period_start) + reference_period_end: Mapped[DateTime] = column_property(humanitarian_needs_view.c.reference_period_end) + sector_name: Mapped[str] = column_property(humanitarian_needs_view.c.sector_name) + location_code: Mapped[str] = column_property(humanitarian_needs_view.c.location_code) + location_name: Mapped[str] = column_property(humanitarian_needs_view.c.location_name) + location_ref: Mapped[int] = column_property(humanitarian_needs_view.c.location_ref) + admin1_code: Mapped[str] = column_property(humanitarian_needs_view.c.admin1_code) + admin1_name: Mapped[str] = column_property(humanitarian_needs_view.c.admin1_name) + admin1_is_unspecified: Mapped[bool] = column_property(humanitarian_needs_view.c.admin1_is_unspecified) + admin2_code: Mapped[str] = column_property(humanitarian_needs_view.c.admin2_code) + admin2_name: Mapped[str] = column_property(humanitarian_needs_view.c.admin2_name) + admin2_is_unspecified: Mapped[bool] = column_property(humanitarian_needs_view.c.admin2_is_unspecified) + admin1_ref: Mapped[int] = column_property(humanitarian_needs_view.c.admin1_ref) + + +class LocationView(Base): + __table__ = location_view + id: Mapped[int] = column_property(location_view.c.id) + code: Mapped[str] = column_property(location_view.c.code) + name: Mapped[str] = column_property(location_view.c.name) + from_cods: Mapped[bool] = column_property(location_view.c.from_cods) + reference_period_start: Mapped[DateTime] = column_property(location_view.c.reference_period_start) + reference_period_end: Mapped[DateTime] = column_property(location_view.c.reference_period_end) + + +class NationalRiskView(Base): + __table__ = national_risk_view + resource_hdx_id: Mapped[str] = column_property(national_risk_view.c.resource_hdx_id) + location_ref: Mapped[int] = column_property(national_risk_view.c.location_ref) + risk_class: Mapped[RiskClass] = column_property(national_risk_view.c.risk_class) + global_rank: Mapped[int] = column_property(national_risk_view.c.global_rank) + overall_risk: Mapped[float] = column_property(national_risk_view.c.overall_risk) + hazard_exposure_risk: Mapped[float] = column_property(national_risk_view.c.hazard_exposure_risk) + vulnerability_risk: Mapped[float] = column_property(national_risk_view.c.vulnerability_risk) + coping_capacity_risk: Mapped[float] = column_property(national_risk_view.c.coping_capacity_risk) + meta_missing_indicators_pct: Mapped[float] = column_property(national_risk_view.c.meta_missing_indicators_pct) + meta_avg_recentness_years: Mapped[float] = column_property(national_risk_view.c.meta_avg_recentness_years) + reference_period_start: Mapped[DateTime] = column_property(national_risk_view.c.reference_period_start) + reference_period_end: Mapped[DateTime] = column_property(national_risk_view.c.reference_period_end) + location_code: Mapped[str] = column_property(national_risk_view.c.location_code) + location_name: 
Mapped[str] = column_property(national_risk_view.c.location_name) + + +class OperationalPresenceView(Base): + __table__ = operational_presence_view + resource_hdx_id: Mapped[str] = column_property(operational_presence_view.c.resource_hdx_id) + admin2_ref: Mapped[int] = column_property(operational_presence_view.c.admin2_ref) + org_acronym: Mapped[str] = column_property(operational_presence_view.c.org_acronym) + org_name: Mapped[str] = column_property(operational_presence_view.c.org_name) + sector_code: Mapped[str] = column_property(operational_presence_view.c.sector_code) + reference_period_start: Mapped[DateTime] = column_property(operational_presence_view.c.reference_period_start) + reference_period_end: Mapped[DateTime] = column_property(operational_presence_view.c.reference_period_end) + org_type_code: Mapped[str] = column_property(operational_presence_view.c.org_type_code) + org_type_description: Mapped[str] = column_property(operational_presence_view.c.org_type_description) + sector_name: Mapped[str] = column_property(operational_presence_view.c.sector_name) + location_code: Mapped[str] = column_property(operational_presence_view.c.location_code) + location_name: Mapped[str] = column_property(operational_presence_view.c.location_name) + admin1_code: Mapped[str] = column_property(operational_presence_view.c.admin1_code) + admin1_name: Mapped[str] = column_property(operational_presence_view.c.admin1_name) + admin1_is_unspecified: Mapped[bool] = column_property(operational_presence_view.c.admin1_is_unspecified) + location_ref: Mapped[int] = column_property(operational_presence_view.c.location_ref) + admin2_code: Mapped[str] = column_property(operational_presence_view.c.admin2_code) + admin2_name: Mapped[str] = column_property(operational_presence_view.c.admin2_name) + admin2_is_unspecified: Mapped[bool] = column_property(operational_presence_view.c.admin2_is_unspecified) + admin1_ref: Mapped[int] = column_property(operational_presence_view.c.admin1_ref) + + +class OrgTypeView(Base): + __table__ = org_type_view + code: Mapped[str] = column_property(org_type_view.c.code) + description: Mapped[str] = column_property(org_type_view.c.description) + + +class OrgView(Base): + __table__ = org_view + acronym: Mapped[str] = column_property(org_view.c.acronym) + name: Mapped[str] = column_property(org_view.c.name) + org_type_code: Mapped[str] = column_property(org_view.c.org_type_code) + org_type_description: Mapped[str] = column_property(org_view.c.org_type_description) + + +class PopulationView(Base): + __table__ = population_view + resource_hdx_id: Mapped[str] = column_property(population_view.c.resource_hdx_id) + admin2_ref: Mapped[int] = column_property(population_view.c.admin2_ref) + gender: Mapped[Gender] = column_property(population_view.c.gender) + age_range: Mapped[str] = column_property(population_view.c.age_range) + min_age: Mapped[int] = column_property(population_view.c.min_age) + max_age: Mapped[int] = column_property(population_view.c.max_age) + population: Mapped[int] = column_property(population_view.c.population) + reference_period_start: Mapped[DateTime] = column_property(population_view.c.reference_period_start) + reference_period_end: Mapped[DateTime] = column_property(population_view.c.reference_period_end) + location_code: Mapped[str] = column_property(population_view.c.location_code) + location_name: Mapped[str] = column_property(population_view.c.location_name) + admin1_code: Mapped[str] = column_property(population_view.c.admin1_code) + admin1_name: Mapped[str] = 
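Several mapped columns here are typed with the enums imported from `hapi_schema.utils.enums` (`Gender`, `PopulationGroup`, `IPCPhase`, and so on), which lets filters be written against enum members rather than raw strings. A small sketch, assuming the `Gender` enum exposes a `FEMALE` member (check `hapi_schema.utils.enums` for the actual member names):

```python
from sqlalchemy import select

from hapi_schema.utils.enums import Gender
from hdx_hapi.db.models.views.all_views import PopulationView


def population_by_gender(location_code: str, gender: Gender):
    # If the underlying view column stores plain strings rather than a
    # SQL enum type, compare against gender.value instead of the member.
    return (
        select(PopulationView)
        .where(PopulationView.gender == gender)
        .where(PopulationView.location_code == location_code.upper())
    )


# e.g. population_by_gender('AFG', Gender.FEMALE)
```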
column_property(population_view.c.admin1_name) + admin1_is_unspecified: Mapped[bool] = column_property(population_view.c.admin1_is_unspecified) + location_ref: Mapped[int] = column_property(population_view.c.location_ref) + admin2_code: Mapped[str] = column_property(population_view.c.admin2_code) + admin2_name: Mapped[str] = column_property(population_view.c.admin2_name) + admin2_is_unspecified: Mapped[bool] = column_property(population_view.c.admin2_is_unspecified) + admin1_ref: Mapped[int] = column_property(population_view.c.admin1_ref) + + +class PovertyRateView(Base): + __table__ = poverty_rate_view + resource_hdx_id: Mapped[str] = column_property(poverty_rate_view.c.resource_hdx_id) + admin1_ref: Mapped[int] = column_property(poverty_rate_view.c.admin1_ref) + admin1_name: Mapped[str] = column_property(poverty_rate_view.c.admin1_name) + mpi: Mapped[float] = column_property(poverty_rate_view.c.mpi) + headcount_ratio: Mapped[float] = column_property(poverty_rate_view.c.headcount_ratio) + intensity_of_deprivation: Mapped[float] = column_property(poverty_rate_view.c.intensity_of_deprivation) + vulnerable_to_poverty: Mapped[float] = column_property(poverty_rate_view.c.vulnerable_to_poverty) + in_severe_poverty: Mapped[float] = column_property(poverty_rate_view.c.in_severe_poverty) + reference_period_start: Mapped[DateTime] = column_property(poverty_rate_view.c.reference_period_start) + reference_period_end: Mapped[DateTime] = column_property(poverty_rate_view.c.reference_period_end) + location_code: Mapped[str] = column_property(poverty_rate_view.c.location_code) + location_name: Mapped[str] = column_property(poverty_rate_view.c.location_name) + admin1_code: Mapped[str] = column_property(poverty_rate_view.c.admin1_code) + admin1_is_unspecified: Mapped[bool] = column_property(poverty_rate_view.c.admin1_is_unspecified) + location_ref: Mapped[int] = column_property(poverty_rate_view.c.location_ref) + + +class RefugeesView(Base): + __table__ = refugees_view + resource_hdx_id: Mapped[str] = column_property(refugees_view.c.resource_hdx_id) + origin_location_ref: Mapped[int] = column_property(refugees_view.c.origin_location_ref) + asylum_location_ref: Mapped[int] = column_property(refugees_view.c.asylum_location_ref) + population_group: Mapped[PopulationGroup] = column_property(refugees_view.c.population_group) + gender: Mapped[Gender] = column_property(refugees_view.c.gender) + age_range: Mapped[str] = column_property(refugees_view.c.age_range) + min_age: Mapped[int] = column_property(refugees_view.c.min_age) + max_age: Mapped[int] = column_property(refugees_view.c.max_age) + population: Mapped[int] = column_property(refugees_view.c.population) + reference_period_start: Mapped[DateTime] = column_property(refugees_view.c.reference_period_start) + reference_period_end: Mapped[DateTime] = column_property(refugees_view.c.reference_period_end) + origin_location_code: Mapped[str] = column_property(refugees_view.c.origin_location_code) + origin_location_name: Mapped[str] = column_property(refugees_view.c.origin_location_name) + asylum_location_code: Mapped[str] = column_property(refugees_view.c.asylum_location_code) + asylum_location_name: Mapped[str] = column_property(refugees_view.c.asylum_location_name) + + +class ResourceView(Base): + __table__ = resource_view + resource_hdx_id: Mapped[str] = column_property(resource_view.c.hdx_id) + dataset_hdx_id: Mapped[str] = column_property(resource_view.c.dataset_hdx_id) + name: Mapped[str] = column_property(resource_view.c.name) + format: Mapped[str] = 
column_property(resource_view.c.format) + update_date: Mapped[DateTime] = column_property(resource_view.c.update_date) + is_hxl: Mapped[bool] = column_property(resource_view.c.is_hxl) + download_url: Mapped[str] = column_property(resource_view.c.download_url) + hapi_updated_date: Mapped[DateTime] = column_property(resource_view.c.hapi_updated_date) + dataset_hdx_stub: Mapped[str] = column_property(resource_view.c.dataset_hdx_stub) + dataset_hdx_title: Mapped[str] = column_property(resource_view.c.dataset_title) + dataset_hdx_provider_stub: Mapped[str] = column_property(resource_view.c.dataset_hdx_provider_stub) + dataset_hdx_provider_name: Mapped[str] = column_property(resource_view.c.dataset_hdx_provider_name) + + +class SectorView(Base): + __table__ = sector_view + code: Mapped[str] = column_property(sector_view.c.code) + name: Mapped[str] = column_property(sector_view.c.name) + + +class WfpCommodityView(Base): + __table__ = wfp_commodity_view + code: Mapped[str] = column_property(wfp_commodity_view.c.code) + category: Mapped[CommodityCategory] = column_property(wfp_commodity_view.c.category) + name: Mapped[str] = column_property(wfp_commodity_view.c.name) + + +class WfpMarketView(Base): + __table__ = wfp_market_view + code: Mapped[str] = column_property(wfp_market_view.c.code) + admin2_ref: Mapped[int] = column_property(wfp_market_view.c.admin2_ref) + name: Mapped[str] = column_property(wfp_market_view.c.name) + lat: Mapped[float] = column_property(wfp_market_view.c.lat) + lon: Mapped[float] = column_property(wfp_market_view.c.lon) + location_code: Mapped[str] = column_property(wfp_market_view.c.location_code) + location_name: Mapped[str] = column_property(wfp_market_view.c.location_name) + admin1_code: Mapped[str] = column_property(wfp_market_view.c.admin1_code) + admin1_name: Mapped[str] = column_property(wfp_market_view.c.admin1_name) + admin1_is_unspecified: Mapped[bool] = column_property(wfp_market_view.c.admin1_is_unspecified) + location_ref: Mapped[int] = column_property(wfp_market_view.c.location_ref) + admin2_code: Mapped[str] = column_property(wfp_market_view.c.admin2_code) + admin2_name: Mapped[str] = column_property(wfp_market_view.c.admin2_name) + admin2_is_unspecified: Mapped[bool] = column_property(wfp_market_view.c.admin2_is_unspecified) + admin1_ref: Mapped[int] = column_property(wfp_market_view.c.admin1_ref) + + +class PatchView(Base): + __table__ = patch_view + id: Mapped[int] = column_property(patch_view.c.id) + patch_sequence_number: Mapped[int] = column_property(patch_view.c.patch_sequence_number) + commit_hash: Mapped[str] = column_property(patch_view.c.commit_hash) + commit_date: Mapped[DateTime] = column_property(patch_view.c.commit_date) + patch_path: Mapped[str] = column_property(patch_view.c.patch_path) + patch_permalink_url: Mapped[str] = column_property(patch_view.c.patch_permalink_url) + patch_target: Mapped[str] = column_property(patch_view.c.patch_target) + patch_hash: Mapped[str] = column_property(patch_view.c.patch_hash) + state: Mapped[str] = column_property(patch_view.c.state) + execution_date: Mapped[DateTime] = column_property(patch_view.c.execution_date) diff --git a/hdx_hapi/db/models/views/db_admin1_view.py b/hdx_hapi/db/models/views/db_admin1_view.py deleted file mode 100644 index 76427bdb..00000000 --- a/hdx_hapi/db/models/views/db_admin1_view.py +++ /dev/null @@ -1,27 +0,0 @@ -from sqlalchemy import DateTime -from sqlalchemy.orm import column_property, Mapped - -from hapi_schema.db_admin1 import view_params_admin1 - -from 
hdx_hapi.db.models.views.util.util import view -from hdx_hapi.db.models.base import Base - - -admin1_view = view(view_params_admin1.name, Base.metadata, view_params_admin1.selectable) - -class Admin1View(Base): - __table__ = admin1_view - - id: Mapped[int] = column_property(admin1_view.c.id) - location_ref: Mapped[int] = column_property(admin1_view.c.location_ref) - code: Mapped[str] = column_property(admin1_view.c.code) - name: Mapped[str] = column_property(admin1_view.c.name) - is_unspecified: Mapped[bool] = column_property(admin1_view.c.is_unspecified) - - reference_period_start: Mapped[DateTime] = column_property(admin1_view.c.reference_period_start) - reference_period_end: Mapped[DateTime] = column_property(admin1_view.c.reference_period_end) - - location_code: Mapped[str] = column_property(admin1_view.c.location_code) - location_name: Mapped[str] = column_property(admin1_view.c.location_name) - location_reference_period_start: Mapped[DateTime] = column_property(admin1_view.c.location_reference_period_start) - location_reference_period_end: Mapped[DateTime] = column_property(admin1_view.c.location_reference_period_end) diff --git a/hdx_hapi/db/models/views/db_admin2_view.py b/hdx_hapi/db/models/views/db_admin2_view.py deleted file mode 100644 index 4eed4d58..00000000 --- a/hdx_hapi/db/models/views/db_admin2_view.py +++ /dev/null @@ -1,31 +0,0 @@ -from sqlalchemy import DateTime -from sqlalchemy.orm import Mapped, column_property - -from hapi_schema.db_admin2 import view_params_admin2 -from hdx_hapi.db.models.views.util.util import view -from hdx_hapi.db.models.base import Base - -admin2_view = view(view_params_admin2.name, Base.metadata, view_params_admin2.selectable) - - -class Admin2View(Base): - __table__ = admin2_view - - id: Mapped[int] = column_property(admin2_view.c.id) - admin1_ref: Mapped[int] = column_property(admin2_view.c.admin1_ref) - code: Mapped[str] = column_property(admin2_view.c.code) - name: Mapped[str] = column_property(admin2_view.c.name) - is_unspecified: Mapped[bool] = column_property(admin2_view.c.is_unspecified) - reference_period_start: Mapped[DateTime] = column_property(admin2_view.c.reference_period_start) - reference_period_end: Mapped[DateTime] = column_property(admin2_view.c.reference_period_end) - - admin1_code: Mapped[str] = column_property(admin2_view.c.admin1_code) - admin1_name: Mapped[str] = column_property(admin2_view.c.admin1_name) - admin1_is_unspecified: Mapped[bool] = column_property(admin2_view.c.admin1_is_unspecified) - admin1_reference_period_start: Mapped[DateTime] = column_property(admin2_view.c.admin1_reference_period_start) - admin1_reference_period_end: Mapped[DateTime] = column_property(admin2_view.c.admin1_reference_period_end) - - location_code: Mapped[str] = column_property(admin2_view.c.location_code) - location_name: Mapped[str] = column_property(admin2_view.c.location_name) - location_reference_period_start: Mapped[DateTime] = column_property(admin2_view.c.location_reference_period_start) - location_reference_period_end: Mapped[DateTime] = column_property(admin2_view.c.location_reference_period_end) diff --git a/hdx_hapi/db/models/views/db_age_range_view.py b/hdx_hapi/db/models/views/db_age_range_view.py deleted file mode 100644 index e1973299..00000000 --- a/hdx_hapi/db/models/views/db_age_range_view.py +++ /dev/null @@ -1,15 +0,0 @@ -from sqlalchemy.orm import column_property, Mapped -from hapi_schema.db_age_range import view_params_age_range -from hdx_hapi.db.models.views.util.util import view -from hdx_hapi.db.models.base 
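The module header of `all_views.py` states that it is emitted by `src/hapi_schema/utils/hapi_views_code_generator.py`, which is why the hand-written per-view modules in this part of the diff can be deleted wholesale. The generator itself is not part of this diff; the following is a rough, hypothetical sketch of the emission loop it implies, with all names and the output template assumed rather than taken from the real script.

```python
# Hypothetical sketch of a views code generator; the real one lives in the
# hapi_schema package and is not shown in this diff.
from dataclasses import dataclass
from typing import Any


@dataclass
class ViewParams:
    name: str        # e.g. 'population_view'
    selectable: Any  # the SELECT statement that defines the view


def emit_model(params: ViewParams) -> str:
    """Render one ORM class over a view, mirroring the classes above."""
    class_name = ''.join(part.title() for part in params.name.split('_'))
    lines = [
        f'{params.name} = view(view_params.name, Base.metadata, view_params.selectable)',
        '',
        f'class {class_name}(Base):',
        f'    __table__ = {params.name}',
    ]
    for col in params.selectable.selected_columns:
        # The real generator derives the Python annotation from the column
        # type; 'Mapped[...]' is left as a placeholder here.
        lines.append(
            f'    {col.name}: Mapped[...] = column_property({params.name}.c.{col.name})'
        )
    return '\n'.join(lines)
```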
import Base - - - -age_range_view = view(view_params_age_range.name, Base.metadata, view_params_age_range.selectable) - -class AgeRangeView(Base): - __table__ = age_range_view - - code: Mapped[str] = column_property(age_range_view.c.code) - age_min: Mapped[int] = column_property(age_range_view.c.age_min) - age_max: Mapped[int] = column_property(age_range_view.c.age_max) \ No newline at end of file diff --git a/hdx_hapi/db/models/views/db_dataset_view.py b/hdx_hapi/db/models/views/db_dataset_view.py deleted file mode 100644 index 62c6101b..00000000 --- a/hdx_hapi/db/models/views/db_dataset_view.py +++ /dev/null @@ -1,19 +0,0 @@ -from sqlalchemy.orm import Mapped, column_property - -from hapi_schema.db_dataset import view_params_dataset - -from hdx_hapi.db.models.base import Base -from hdx_hapi.db.models.views.util.util import view - -dataset_view = view(view_params_dataset.name, Base.metadata, view_params_dataset.selectable) - - -class DatasetView(Base): - __table__ = dataset_view - - id: Mapped[int] = column_property(dataset_view.c.id) - hdx_id: Mapped[str] = column_property(dataset_view.c.hdx_id) - hdx_stub: Mapped[str] = column_property(dataset_view.c.hdx_stub) - title: Mapped[str] = column_property(dataset_view.c.title) - hdx_provider_stub: Mapped[str] = column_property(dataset_view.c.hdx_provider_stub) - hdx_provider_name: Mapped[str] = column_property(dataset_view.c.hdx_provider_name) diff --git a/hdx_hapi/db/models/views/db_food_security_view.py b/hdx_hapi/db/models/views/db_food_security_view.py deleted file mode 100644 index ebba54c4..00000000 --- a/hdx_hapi/db/models/views/db_food_security_view.py +++ /dev/null @@ -1,48 +0,0 @@ -from sqlalchemy import DateTime -from sqlalchemy.orm import Mapped, column_property -from hapi_schema.db_food_security import view_params_food_security - -from hdx_hapi.db.models.views.util.util import view -from hdx_hapi.db.models.base import Base - - -food_security_view = view(view_params_food_security.name, Base.metadata, view_params_food_security.selectable) - - -class FoodSecurityView(Base): - __table__ = food_security_view - - id: Mapped[int] = column_property(food_security_view.c.id) - resource_ref: Mapped[int] = column_property(food_security_view.c.resource_ref) - admin2_ref: Mapped[int] = column_property(food_security_view.c.admin2_ref) - - ipc_phase_name: Mapped[str] = column_property(food_security_view.c.ipc_phase_name) - ipc_phase_code: Mapped[str] = column_property(food_security_view.c.ipc_phase_code) - ipc_type_code: Mapped[str] = column_property(food_security_view.c.ipc_type_code) - population_in_phase: Mapped[int] = column_property(food_security_view.c.population_in_phase) - population_fraction_in_phase: Mapped[float] = column_property(food_security_view.c.population_fraction_in_phase) - - reference_period_start: Mapped[DateTime] = column_property(food_security_view.c.reference_period_start) - reference_period_end: Mapped[DateTime] = column_property(food_security_view.c.reference_period_end) - source_data: Mapped[str] = column_property(food_security_view.c.source_data) - - resource_hdx_id: Mapped[str] = column_property(food_security_view.c.resource_hdx_id) - resource_name: Mapped[str] = column_property(food_security_view.c.resource_name) - resource_update_date = column_property(food_security_view.c.resource_update_date) - - dataset_hdx_id: Mapped[str] = column_property(food_security_view.c.dataset_hdx_id) - dataset_hdx_stub: Mapped[str] = column_property(food_security_view.c.dataset_hdx_stub) - dataset_title: Mapped[str] = 
column_property(food_security_view.c.dataset_title) - dataset_hdx_provider_stub: Mapped[str] = column_property(food_security_view.c.dataset_hdx_provider_stub) - dataset_hdx_provider_name: Mapped[str] = column_property(food_security_view.c.dataset_hdx_provider_name) - - location_code: Mapped[str] = column_property(food_security_view.c.location_code) - location_name: Mapped[str] = column_property(food_security_view.c.location_name) - - admin1_code: Mapped[str] = column_property(food_security_view.c.admin1_code) - admin1_name: Mapped[str] = column_property(food_security_view.c.admin1_name) - admin1_is_unspecified: Mapped[bool] = column_property(food_security_view.c.admin1_is_unspecified) - - admin2_code: Mapped[str] = column_property(food_security_view.c.admin2_code) - admin2_name: Mapped[str] = column_property(food_security_view.c.admin2_name) - admin2_is_unspecified: Mapped[bool] = column_property(food_security_view.c.admin2_is_unspecified) \ No newline at end of file diff --git a/hdx_hapi/db/models/views/db_gender_view.py b/hdx_hapi/db/models/views/db_gender_view.py deleted file mode 100644 index d8888bb8..00000000 --- a/hdx_hapi/db/models/views/db_gender_view.py +++ /dev/null @@ -1,16 +0,0 @@ -from sqlalchemy.orm import Mapped, column_property - -from hapi_schema.db_gender import view_params_gender - -from hdx_hapi.db.models.views.util.util import view -from hdx_hapi.db.models.base import Base - - -gender_view = view(view_params_gender.name, Base.metadata, view_params_gender.selectable) - - -class GenderView(Base): - __table__ = gender_view - - code: Mapped[str] = column_property(gender_view.c.code) - description: Mapped[str] = column_property(gender_view.c.description) diff --git a/hdx_hapi/db/models/views/db_humanitarian_needs_view.py b/hdx_hapi/db/models/views/db_humanitarian_needs_view.py deleted file mode 100644 index ccf4cd3f..00000000 --- a/hdx_hapi/db/models/views/db_humanitarian_needs_view.py +++ /dev/null @@ -1,53 +0,0 @@ -from sqlalchemy import DateTime -from sqlalchemy.orm import Mapped, column_property -from hapi_schema.db_humanitarian_needs import view_params_humanitarian_needs - -from hdx_hapi.db.models.views.util.util import view -from hdx_hapi.db.models.base import Base - - -humanitarian_needs_view = view(view_params_humanitarian_needs.name, Base.metadata, - view_params_humanitarian_needs.selectable) - - -class HumanitarianNeedsView(Base): - __table__ = humanitarian_needs_view - - id: Mapped[int] = column_property(humanitarian_needs_view.c.id) - resource_ref: Mapped[int] = column_property(humanitarian_needs_view.c.resource_ref) - admin2_ref: Mapped[int] = column_property(humanitarian_needs_view.c.admin2_ref) - - population_status_code: Mapped[str] = column_property(humanitarian_needs_view.c.population_status_code) - population_group_code: Mapped[str] = column_property(humanitarian_needs_view.c.population_group_code) - sector_code: Mapped[str] = column_property(humanitarian_needs_view.c.sector_code) - sector_name: Mapped[str] = column_property(humanitarian_needs_view.c.sector_name) - - gender_code: Mapped[str] = column_property(humanitarian_needs_view.c.gender_code) - age_range_code: Mapped[str] = column_property(humanitarian_needs_view.c.age_range_code) - disabled_marker: Mapped[bool] = column_property(humanitarian_needs_view.c.disabled_marker) - population: Mapped[int] = column_property(humanitarian_needs_view.c.population) - - reference_period_start: Mapped[DateTime] = column_property(humanitarian_needs_view.c.reference_period_start) - reference_period_end: 
Mapped[DateTime] = column_property(humanitarian_needs_view.c.reference_period_end) - source_data: Mapped[str] = column_property(humanitarian_needs_view.c.source_data) - - resource_hdx_id: Mapped[str] = column_property(humanitarian_needs_view.c.resource_hdx_id) - resource_name: Mapped[str] = column_property(humanitarian_needs_view.c.resource_name) - resource_update_date = column_property(humanitarian_needs_view.c.resource_update_date) - - dataset_hdx_id: Mapped[str] = column_property(humanitarian_needs_view.c.dataset_hdx_id) - dataset_hdx_stub: Mapped[str] = column_property(humanitarian_needs_view.c.dataset_hdx_stub) - dataset_title: Mapped[str] = column_property(humanitarian_needs_view.c.dataset_title) - dataset_hdx_provider_stub: Mapped[str] = column_property(humanitarian_needs_view.c.dataset_hdx_provider_stub) - dataset_hdx_provider_name: Mapped[str] = column_property(humanitarian_needs_view.c.dataset_hdx_provider_name) - - location_code: Mapped[str] = column_property(humanitarian_needs_view.c.location_code) - location_name: Mapped[str] = column_property(humanitarian_needs_view.c.location_name) - - admin1_code: Mapped[str] = column_property(humanitarian_needs_view.c.admin1_code) - admin1_name: Mapped[str] = column_property(humanitarian_needs_view.c.admin1_name) - admin1_is_unspecified: Mapped[bool] = column_property(humanitarian_needs_view.c.admin1_is_unspecified) - - admin2_code: Mapped[str] = column_property(humanitarian_needs_view.c.admin2_code) - admin2_name: Mapped[str] = column_property(humanitarian_needs_view.c.admin2_name) - admin2_is_unspecified: Mapped[bool] = column_property(humanitarian_needs_view.c.admin2_is_unspecified) \ No newline at end of file diff --git a/hdx_hapi/db/models/views/db_location_view.py b/hdx_hapi/db/models/views/db_location_view.py deleted file mode 100644 index c2371cbe..00000000 --- a/hdx_hapi/db/models/views/db_location_view.py +++ /dev/null @@ -1,21 +0,0 @@ -from sqlalchemy import DateTime -from sqlalchemy.orm import Mapped, column_property - -from hapi_schema.db_location import view_params_location - -from hdx_hapi.db.models.views.util.util import view -from hdx_hapi.db.models.base import Base - - -location_view = view(view_params_location.name, Base.metadata, view_params_location.selectable) - - -class LocationView(Base): - __table__ = location_view - - id: Mapped[int] = column_property(location_view.c.id) - code: Mapped[str] = column_property(location_view.c.code) - name: Mapped[str] = column_property(location_view.c.name) - - reference_period_start: Mapped[DateTime] = column_property(location_view.c.reference_period_start) - reference_period_end: Mapped[DateTime] = column_property(location_view.c.reference_period_end) \ No newline at end of file diff --git a/hdx_hapi/db/models/views/db_national_risk_view.py b/hdx_hapi/db/models/views/db_national_risk_view.py deleted file mode 100644 index e3b88fc1..00000000 --- a/hdx_hapi/db/models/views/db_national_risk_view.py +++ /dev/null @@ -1,47 +0,0 @@ -from sqlalchemy import DateTime -from sqlalchemy.orm import Mapped, column_property -from hapi_schema.db_national_risk import view_params_national_risk - -from hdx_hapi.db.models.views.util.util import view -from hdx_hapi.db.models.base import Base - - -national_risk_view = view(view_params_national_risk.name, Base.metadata, view_params_national_risk.selectable) - - -class NationalRiskView(Base): - __table__ = national_risk_view - - id: Mapped[int] = column_property(national_risk_view.c.id) - - resource_ref: Mapped[int] = 
column_property(national_risk_view.c.resource_ref) - admin2_ref: Mapped[int] = column_property(national_risk_view.c.admin2_ref) - - risk_class: Mapped[int] = column_property(national_risk_view.c.risk_class) - global_rank: Mapped[int] = column_property(national_risk_view.c.global_rank) - overall_risk: Mapped[float] = column_property(national_risk_view.c.overall_risk) - hazard_exposure_risk: Mapped[float] = column_property(national_risk_view.c.hazard_exposure_risk) - vulnerability_risk: Mapped[float] = column_property(national_risk_view.c.vulnerability_risk) - coping_capacity_risk: Mapped[float] = column_property(national_risk_view.c.coping_capacity_risk) - meta_missing_indicators_pct: Mapped[float] = column_property(national_risk_view.c.meta_missing_indicators_pct) - meta_avg_recentness_years: Mapped[float] = column_property(national_risk_view.c.meta_avg_recentness_years) - - reference_period_start: Mapped[DateTime] = column_property(national_risk_view.c.reference_period_start) - reference_period_end: Mapped[DateTime] = column_property(national_risk_view.c.reference_period_end) - - source_data: Mapped[str] = column_property(national_risk_view.c.source_data) - - dataset_hdx_id: Mapped[str] = column_property(national_risk_view.c.dataset_hdx_id) - dataset_hdx_stub: Mapped[str] = column_property(national_risk_view.c.dataset_hdx_stub) - dataset_title: Mapped[str] = column_property(national_risk_view.c.dataset_title) - dataset_hdx_provider_stub: Mapped[str] = column_property(national_risk_view.c.dataset_hdx_provider_stub) - dataset_hdx_provider_name: Mapped[str] = column_property(national_risk_view.c.dataset_hdx_provider_name) - - resource_hdx_id: Mapped[str] = column_property(national_risk_view.c.resource_hdx_id) - resource_name: Mapped[str] = column_property(national_risk_view.c.resource_name) - resource_update_date = column_property(national_risk_view.c.resource_update_date) - - # sector_name: Mapped[str] = column_property(national_risk_view.c.sector_name) - - location_code: Mapped[str] = column_property(national_risk_view.c.location_code) - location_name: Mapped[str] = column_property(national_risk_view.c.location_name) \ No newline at end of file diff --git a/hdx_hapi/db/models/views/db_operational_presence_view.py b/hdx_hapi/db/models/views/db_operational_presence_view.py deleted file mode 100644 index 3d15a0ed..00000000 --- a/hdx_hapi/db/models/views/db_operational_presence_view.py +++ /dev/null @@ -1,47 +0,0 @@ -from sqlalchemy import DateTime -from sqlalchemy.orm import Mapped, column_property - -from hapi_schema.db_operational_presence import view_params_operational_presence - -from hdx_hapi.db.models.views.util.util import view -from hdx_hapi.db.models.base import Base - -operational_presence_view = \ - view(view_params_operational_presence.name, Base.metadata, view_params_operational_presence.selectable) - - -class OperationalPresenceView(Base): - __table__ = operational_presence_view - - id: Mapped[int] = column_property(operational_presence_view.c.id) - resource_ref: Mapped[int] = column_property(operational_presence_view.c.resource_ref) - org_ref: Mapped[int] = column_property(operational_presence_view.c.org_ref) - - sector_code: Mapped[str] = column_property(operational_presence_view.c.sector_code) - admin2_ref: Mapped[int] = column_property(operational_presence_view.c.admin2_ref) - reference_period_start: Mapped[DateTime] = column_property(operational_presence_view.c.reference_period_start) - reference_period_end: Mapped[DateTime] = 
column_property(operational_presence_view.c.reference_period_end) - source_data: Mapped[str] = column_property(operational_presence_view.c.source_data) - - # Additional fields from other tables in the view - dataset_hdx_id: Mapped[str] = column_property(operational_presence_view.c.dataset_hdx_id) - dataset_hdx_stub: Mapped[str] = column_property(operational_presence_view.c.dataset_hdx_stub) - dataset_title: Mapped[str] = column_property(operational_presence_view.c.dataset_title) - dataset_hdx_provider_stub: Mapped[str] = column_property(operational_presence_view.c.dataset_hdx_provider_stub) - dataset_hdx_provider_name: Mapped[str] = column_property(operational_presence_view.c.dataset_hdx_provider_name) - resource_hdx_id: Mapped[str] = column_property(operational_presence_view.c.resource_hdx_id) - resource_name: Mapped[str] = column_property(operational_presence_view.c.resource_name) - resource_update_date: Mapped[DateTime] = column_property(operational_presence_view.c.resource_update_date) - org_acronym: Mapped[str] = column_property(operational_presence_view.c.org_acronym) - org_name: Mapped[str] = column_property(operational_presence_view.c.org_name) - org_type_code: Mapped[str] = column_property(operational_presence_view.c.org_type_code) - org_type_description: Mapped[str] = column_property(operational_presence_view.c.org_type_description) - sector_name: Mapped[str] = column_property(operational_presence_view.c.sector_name) - location_code: Mapped[str] = column_property(operational_presence_view.c.location_code) - location_name: Mapped[str] = column_property(operational_presence_view.c.location_name) - admin1_code: Mapped[str] = column_property(operational_presence_view.c.admin1_code) - admin1_name: Mapped[str] = column_property(operational_presence_view.c.admin1_name) - admin1_is_unspecified: Mapped[bool] = column_property(operational_presence_view.c.admin1_is_unspecified) - admin2_code: Mapped[str] = column_property(operational_presence_view.c.admin2_code) - admin2_name: Mapped[str] = column_property(operational_presence_view.c.admin2_name) - admin2_is_unspecified: Mapped[bool] = column_property(operational_presence_view.c.admin2_is_unspecified) diff --git a/hdx_hapi/db/models/views/db_org_type_view.py b/hdx_hapi/db/models/views/db_org_type_view.py deleted file mode 100644 index 70e9aca7..00000000 --- a/hdx_hapi/db/models/views/db_org_type_view.py +++ /dev/null @@ -1,16 +0,0 @@ -from sqlalchemy.orm import Mapped, column_property - -from hapi_schema.db_org_type import view_params_org_type - -from hdx_hapi.db.models.views.util.util import view -from hdx_hapi.db.models.base import Base - - -org_type_view = view(view_params_org_type.name, Base.metadata, view_params_org_type.selectable) - - -class OrgTypeView(Base): - __table__ = org_type_view - - code: Mapped[str] = column_property(org_type_view.c.code) - description: Mapped[str] = column_property(org_type_view.c.description) diff --git a/hdx_hapi/db/models/views/db_org_view.py b/hdx_hapi/db/models/views/db_org_view.py deleted file mode 100644 index f505a392..00000000 --- a/hdx_hapi/db/models/views/db_org_view.py +++ /dev/null @@ -1,16 +0,0 @@ -from sqlalchemy import Integer, String, DateTime, text -from sqlalchemy.orm import Mapped, mapped_column -from hdx_hapi.db.models.base import Base - - -class OrgView(Base): - __tablename__ = 'org_view' - - id: Mapped[int] = mapped_column(Integer, primary_key=True) - # hdx_link: Mapped[str] = mapped_column(String(1024), nullable=False) - acronym: Mapped[str] = mapped_column(String(32), 
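The deleted `OrgView` here is notable as the one model that was declared table-style, with `__tablename__` and `mapped_column()`, whereas its replacement in `all_views.py` maps `__table__` onto the view's `Table` and exposes each attribute through `column_property()` like every other class. The difference in a minimal, self-contained form (table and column names are illustrative, not copied from the schema):

```python
from sqlalchemy import Column, Integer, String, Table, select
from sqlalchemy.orm import DeclarativeBase, Mapped, column_property, mapped_column


class Base(DeclarativeBase):  # SQLAlchemy 2.0 style declarative base
    pass


# Table-style mapping, as in the deleted module: SQLAlchemy builds the
# Table object for you from __tablename__ plus mapped_column().
class OrgTableStyle(Base):
    __tablename__ = 'org'
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    acronym: Mapped[str] = mapped_column(String(32))


# View-style mapping, as in all_views.py: the Table already exists (here
# hand-built; in the diff it comes from the view() helper) and each
# attribute is exposed read-only through column_property().
org_view = Table(
    'org_view',
    Base.metadata,
    Column('id', Integer, primary_key=True),
    Column('acronym', String(32)),
)


class OrgViewStyle(Base):
    __table__ = org_view
    id: Mapped[int] = column_property(org_view.c.id)
    acronym: Mapped[str] = column_property(org_view.c.acronym)


print(select(OrgViewStyle).where(OrgViewStyle.acronym == 'OCHA'))
```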
nullable=False, index=True) - name: Mapped[str] = mapped_column(String(512), nullable=False) - org_type_code: Mapped[str] = mapped_column(String(32), nullable=False) - reference_period_start: Mapped[DateTime] = mapped_column(DateTime, nullable=False, index=True) - reference_period_end: Mapped[DateTime] = mapped_column(DateTime, nullable=True, server_default=text('NULL')) - org_type_description: Mapped[str] = mapped_column(String(512), nullable=False) diff --git a/hdx_hapi/db/models/views/db_population_group_view.py b/hdx_hapi/db/models/views/db_population_group_view.py deleted file mode 100644 index 4bafcb05..00000000 --- a/hdx_hapi/db/models/views/db_population_group_view.py +++ /dev/null @@ -1,16 +0,0 @@ -from sqlalchemy.orm import Mapped, column_property - -from hapi_schema.db_population_group import view_params_population_group - -from hdx_hapi.db.models.views.util.util import view -from hdx_hapi.db.models.base import Base - - -population_group_view = view(view_params_population_group.name, Base.metadata, view_params_population_group.selectable) - - -class PopulationGroupView(Base): - __table__ = population_group_view - - code: Mapped[str] = column_property(population_group_view.c.code) - description: Mapped[str] = column_property(population_group_view.c.description) diff --git a/hdx_hapi/db/models/views/db_population_status_view.py b/hdx_hapi/db/models/views/db_population_status_view.py deleted file mode 100644 index 8d503365..00000000 --- a/hdx_hapi/db/models/views/db_population_status_view.py +++ /dev/null @@ -1,17 +0,0 @@ -from sqlalchemy.orm import Mapped, column_property - -from hapi_schema.db_population_status import view_params_population_status - -from hdx_hapi.db.models.views.util.util import view -from hdx_hapi.db.models.base import Base - - -population_status_view = view(view_params_population_status.name, Base.metadata, - view_params_population_status.selectable) - - -class PopulationStatusView(Base): - __table__ = population_status_view - - code: Mapped[str] = column_property(population_status_view.c.code) - description: Mapped[str] = column_property(population_status_view.c.description) diff --git a/hdx_hapi/db/models/views/db_population_view.py b/hdx_hapi/db/models/views/db_population_view.py deleted file mode 100644 index 0bba3653..00000000 --- a/hdx_hapi/db/models/views/db_population_view.py +++ /dev/null @@ -1,45 +0,0 @@ -from sqlalchemy import DateTime -from sqlalchemy.orm import Mapped, column_property -from hapi_schema.db_population import view_params_population - -from hdx_hapi.db.models.views.util.util import view -from hdx_hapi.db.models.base import Base - - -population_view = view(view_params_population.name, Base.metadata, view_params_population.selectable) - - -class PopulationView(Base): - __table__ = population_view - - id: Mapped[int] = column_property(population_view.c.id) - resource_ref: Mapped[int] = column_property(population_view.c.resource_ref) - admin2_ref: Mapped[int] = column_property(population_view.c.admin2_ref) - gender_code: Mapped[str] = column_property(population_view.c.gender_code) - age_range_code: Mapped[str] = column_property(population_view.c.age_range_code) - - population: Mapped[int] = column_property(population_view.c.population) - reference_period_start: Mapped[DateTime] = column_property(population_view.c.reference_period_start) - reference_period_end: Mapped[DateTime] = column_property(population_view.c.reference_period_end) - source_data: Mapped[str] = column_property(population_view.c.source_data) - - resource_hdx_id: 
Mapped[str] = column_property(population_view.c.resource_hdx_id) - resource_name: Mapped[str] = column_property(population_view.c.resource_name) - resource_update_date = column_property(population_view.c.resource_update_date) - - dataset_hdx_id: Mapped[str] = column_property(population_view.c.dataset_hdx_id) - dataset_hdx_stub: Mapped[str] = column_property(population_view.c.dataset_hdx_stub) - dataset_title: Mapped[str] = column_property(population_view.c.dataset_title) - dataset_hdx_provider_stub: Mapped[str] = column_property(population_view.c.dataset_hdx_provider_stub) - dataset_hdx_provider_name: Mapped[str] = column_property(population_view.c.dataset_hdx_provider_name) - - location_code: Mapped[str] = column_property(population_view.c.location_code) - location_name: Mapped[str] = column_property(population_view.c.location_name) - - admin1_code: Mapped[str] = column_property(population_view.c.admin1_code) - admin1_name: Mapped[str] = column_property(population_view.c.admin1_name) - admin1_is_unspecified: Mapped[bool] = column_property(population_view.c.admin1_is_unspecified) - - admin2_code: Mapped[str] = column_property(population_view.c.admin2_code) - admin2_name: Mapped[str] = column_property(population_view.c.admin2_name) - admin2_is_unspecified: Mapped[bool] = column_property(population_view.c.admin2_is_unspecified) \ No newline at end of file diff --git a/hdx_hapi/db/models/views/db_resource_view.py b/hdx_hapi/db/models/views/db_resource_view.py deleted file mode 100644 index 68a9f1cb..00000000 --- a/hdx_hapi/db/models/views/db_resource_view.py +++ /dev/null @@ -1,30 +0,0 @@ -from sqlalchemy.orm import Mapped, column_property - -from hapi_schema.db_resource import view_params_resource - -from hdx_hapi.db.models.views.util.util import view -from hdx_hapi.db.models.base import Base - - -resource_view = view(view_params_resource.name, Base.metadata, view_params_resource.selectable) - - -class ResourceView(Base): - __table__ = resource_view - - id: Mapped[int] = column_property(resource_view.c.id) - dataset_ref: Mapped[int] = column_property(resource_view.c.dataset_ref) - hdx_id: Mapped[str] = column_property(resource_view.c.hdx_id) - name: Mapped[str] = column_property(resource_view.c.name) - format: Mapped[str] = column_property(resource_view.c.format) - update_date = column_property(resource_view.c.update_date) - is_hxl: Mapped[bool] = column_property(resource_view.c.is_hxl) - download_url: Mapped[str] = column_property(resource_view.c.download_url) - - dataset_hdx_id: Mapped[str] = column_property(resource_view.c.dataset_hdx_id) - dataset_hdx_stub: Mapped[str] = column_property(resource_view.c.dataset_hdx_stub) - dataset_title: Mapped[str] = column_property(resource_view.c.dataset_title) - - dataset_hdx_provider_stub: Mapped[str] = column_property(resource_view.c.dataset_hdx_provider_stub) - dataset_hdx_provider_name: Mapped[str] = column_property(resource_view.c.dataset_hdx_provider_name) - \ No newline at end of file diff --git a/hdx_hapi/db/models/views/db_sector_view.py b/hdx_hapi/db/models/views/db_sector_view.py deleted file mode 100644 index d8be179c..00000000 --- a/hdx_hapi/db/models/views/db_sector_view.py +++ /dev/null @@ -1,19 +0,0 @@ -from sqlalchemy import DateTime -from sqlalchemy.orm import Mapped, column_property - -from hapi_schema.db_sector import view_params_sector - -from hdx_hapi.db.models.views.util.util import view -from hdx_hapi.db.models.base import Base - - -sector_view = view(view_params_sector.name, Base.metadata, view_params_sector.selectable) - - 
-class SectorView(Base): - __table__ = sector_view - - code: Mapped[str] = column_property(sector_view.c.code) - name: Mapped[str] = column_property(sector_view.c.name) - reference_period_start: Mapped[DateTime] = column_property(sector_view.c.reference_period_start) - reference_period_end: Mapped[DateTime] = column_property(sector_view.c.reference_period_end) diff --git a/hdx_hapi/endpoints/exception_handler/__init__.py b/hdx_hapi/endpoints/exception_handler/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/hdx_hapi/endpoints/exception_handler/response_validation_error_handler.py b/hdx_hapi/endpoints/exception_handler/response_validation_error_handler.py new file mode 100644 index 00000000..5cd037a1 --- /dev/null +++ b/hdx_hapi/endpoints/exception_handler/response_validation_error_handler.py @@ -0,0 +1,24 @@ +import logging + +from fastapi import Request, status +from fastapi.exceptions import ResponseValidationError +from fastapi.encoders import jsonable_encoder +from fastapi.responses import JSONResponse + + +logger = logging.getLogger(__name__) + + +async def response_validation_error_handler(request: Request, exc: ResponseValidationError) -> JSONResponse: + errors = exc.errors() + err_num = len(errors) if errors else 0 + error_message = 'Internal Server Error. Response data is invalid.' + logger.error(f'{error_message}. There were {err_num} errors. Request url was {request.url}.') + if err_num > 0: + error_message += ( + f' There were {err_num} errors. A couple of errors will be shown in the "error_sample_list" field.' + ) + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=jsonable_encoder({'error_sample_list': errors[:5], 'error': error_message}), + ) diff --git a/hdx_hapi/endpoints/get_admin_level.py b/hdx_hapi/endpoints/get_admin_level.py index c371e1e4..c477024a 100644 --- a/hdx_hapi/endpoints/get_admin_level.py +++ b/hdx_hapi/endpoints/get_admin_level.py @@ -1,4 +1,4 @@ -from typing import List, Annotated +from typing import Annotated from fastapi import Depends, Query, APIRouter from sqlalchemy.ext.asyncio import AsyncSession @@ -14,8 +14,15 @@ DOC_SEE_LOC, ) +from hdx_hapi.endpoints.models.base import HapiGenericResponse from hdx_hapi.endpoints.models.admin_level import Admin1Response, Admin2Response, LocationResponse -from hdx_hapi.endpoints.util.util import OutputFormat, pagination_parameters +from hdx_hapi.endpoints.util.util import ( + CommonEndpointParams, + OutputFormat, + # ReferencePeriodParameters, + common_endpoint_parameters, + # reference_period_parameters, +) from hdx_hapi.services.admin1_logic import get_admin1_srv from hdx_hapi.services.admin2_logic import get_admin2_srv from hdx_hapi.services.csv_transform_logic import transform_result_to_csv_stream_if_requested @@ -25,60 +32,87 @@ CONFIG = get_config() router = APIRouter( - tags=['Locations and Administrative Divisions'], + tags=['Metadata'], ) @router.get( - '/api/location', - response_model=List[LocationResponse], - summary='Get the list of locations (typically countries) included in HAPI', + '/api/metadata/location', + response_model=HapiGenericResponse[LocationResponse], + summary='Get the list of locations (typically countries) included in HDX HAPI', include_in_schema=False, ) @router.get( - '/api/v1/location', - response_model=List[LocationResponse], - summary='Get the list of locations (typically countries) included in HAPI', + '/api/v1/metadata/location', + response_model=HapiGenericResponse[LocationResponse], + summary='Get the list of locations 
(typically countries) included in HDX HAPI', ) async def get_locations( - pagination_parameters: Annotated[dict, Depends(pagination_parameters)], + # ref_period_parameters: Annotated[ReferencePeriodParameters, Depends(reference_period_parameters)], + common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)], db: AsyncSession = Depends(get_db), code: Annotated[str, Query(max_length=128, description=f'{DOC_LOCATION_CODE}')] = None, name: Annotated[str, Query(max_length=512, description=f'{DOC_LOCATION_NAME}')] = None, output_format: OutputFormat = OutputFormat.JSON, ): - result = await get_locations_srv(pagination_parameters=pagination_parameters, db=db, code=code, name=name) + ref_period_parameters = None + result = await get_locations_srv( + pagination_parameters=common_parameters, + ref_period_parameters=ref_period_parameters, + db=db, + code=code, + name=name, + ) return transform_result_to_csv_stream_if_requested(result, output_format, LocationResponse) get_locations.__doc__ = ( - 'Not all data are available for all locations. Learn more about the scope of data coverage in HAPI in ' + 'Not all data are available for all locations. Learn more about the scope of data coverage in HDX HAPI in ' f'the Overview and Getting Started documentation.' ) @router.get( - '/api/admin1', - response_model=List[Admin1Response], - summary='Get the list of first-level subnational administrative divisions available in HAPI', + '/api/metadata/admin1', + response_model=HapiGenericResponse[Admin1Response], + summary='Get the list of first-level subnational administrative divisions available in HDX HAPI', include_in_schema=False, ) @router.get( - '/api/v1/admin1', - response_model=List[Admin1Response], - summary='Get the list of first-level subnational administrative divisions available in HAPI', + '/api/v1/metadata/admin1', + response_model=HapiGenericResponse[Admin1Response], + summary='Get the list of first-level subnational administrative divisions available in HDX HAPI', ) async def get_admin1( - pagination_parameters: Annotated[dict, Depends(pagination_parameters)], + # ref_period_parameters: Annotated[ReferencePeriodParameters, Depends(reference_period_parameters)], + common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)], db: AsyncSession = Depends(get_db), code: Annotated[str, Query(max_length=128, description=f'{DOC_ADMIN1_CODE}')] = None, name: Annotated[str, Query(max_length=512, description=f'{DOC_ADMIN1_NAME}')] = None, + # hapi_updated_date_min: Annotated[ + # NaiveDatetime | date, + # Query(description=f'{DOC_HAPI_UPDATED_DATE_MIN}'), + # ] = None, + # hapi_updated_date_max: Annotated[ + # NaiveDatetime | date, + # Query(description=f'{DOC_HAPI_UPDATED_DATE_MAX}'), + # ] = None, + # hapi_replaced_date_min: Annotated[ + # NaiveDatetime | date, + # Query(description=f'{DOC_HAPI_REPLACED_DATE_MIN}'), + # ] = None, + # hapi_replaced_date_max: Annotated[ + # NaiveDatetime | date, + # Query(description=f'{DOC_HAPI_REPLACED_DATE_MAX}'), + # ] = None, location_code: Annotated[str, Query(max_length=128, description=f'{DOC_LOCATION_CODE} {DOC_SEE_LOC}')] = None, location_name: Annotated[str, Query(max_length=512, description=f'{DOC_LOCATION_NAME} {DOC_SEE_LOC}')] = None, output_format: OutputFormat = OutputFormat.JSON, ): + ref_period_parameters = None result = await get_admin1_srv( - pagination_parameters=pagination_parameters, + pagination_parameters=common_parameters, + ref_period_parameters=ref_period_parameters, db=db, code=code, 
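The endpoints in this file now declare `response_model=HapiGenericResponse[...]` in place of bare `List[...]` models. The envelope itself is defined in `hdx_hapi/endpoints/models/base.py`, which is not part of this hunk, so the shape below is a plausible minimal reconstruction only, assuming a Pydantic v2 generic that wraps the rows in a single field; the real field names may differ.

```python
# Hypothetical reconstruction of the response envelope; the real model is
# in hdx_hapi/endpoints/models/base.py and may differ.
from typing import Generic, List, TypeVar

from pydantic import BaseModel

DataT = TypeVar('DataT')


class HapiGenericResponse(BaseModel, Generic[DataT]):
    data: List[DataT]


class LocationResponse(BaseModel):  # stand-in for the real response model
    code: str
    name: str


payload = HapiGenericResponse[LocationResponse](
    data=[LocationResponse(code='AFG', name='Afghanistan')]
)
print(payload.model_dump())
```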
         name=name,
@@ -89,35 +123,54 @@ async def get_admin1(


 get_admin1.__doc__ = (
-    'Not all data are available for all locations. Learn more about the scope of data coverage in HAPI in '
+    'Not all data are available for all locations. Learn more about the scope of data coverage in HDX HAPI in '
     f'the Overview and Getting Started documentation.'
 )


 @router.get(
-    '/api/admin2',
-    response_model=List[Admin2Response],
-    summary='Get the list of second-level administrative divisions available in HAPI',
+    '/api/metadata/admin2',
+    response_model=HapiGenericResponse[Admin2Response],
+    summary='Get the list of second-level administrative divisions available in HDX HAPI',
     include_in_schema=False,
 )
 @router.get(
-    '/api/v1/admin2',
-    response_model=List[Admin2Response],
-    summary='Get the list of second-level administrative divisions available in HAPI',
+    '/api/v1/metadata/admin2',
+    response_model=HapiGenericResponse[Admin2Response],
+    summary='Get the list of second-level administrative divisions available in HDX HAPI',
 )
 async def get_admin2(
-    pagination_parameters: Annotated[dict, Depends(pagination_parameters)],
+    # ref_period_parameters: Annotated[ReferencePeriodParameters, Depends(reference_period_parameters)],
+    common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
     db: AsyncSession = Depends(get_db),
     code: Annotated[str, Query(max_length=128, description=f'{DOC_ADMIN2_CODE}')] = None,
     name: Annotated[str, Query(max_length=512, description=f'{DOC_ADMIN2_NAME}')] = None,
+    # hapi_updated_date_min: Annotated[
+    #     NaiveDatetime | date,
+    #     Query(description=f'{DOC_HAPI_UPDATED_DATE_MIN}'),
+    # ] = None,
+    # hapi_updated_date_max: Annotated[
+    #     NaiveDatetime | date,
+    #     Query(description=f'{DOC_HAPI_UPDATED_DATE_MAX}'),
+    # ] = None,
+    # hapi_replaced_date_min: Annotated[
+    #     NaiveDatetime | date,
+    #     Query(description=f'{DOC_HAPI_REPLACED_DATE_MIN}'),
+    # ] = None,
+    # hapi_replaced_date_max: Annotated[
+    #     NaiveDatetime | date,
+    #     Query(description=f'{DOC_HAPI_REPLACED_DATE_MAX}'),
+    # ] = None,
     admin1_code: Annotated[str, Query(max_length=128, description=f'{DOC_ADMIN1_CODE} {DOC_SEE_ADMIN1}')] = None,
     admin1_name: Annotated[str, Query(max_length=512, description=f'{DOC_ADMIN1_NAME} {DOC_SEE_ADMIN1}')] = None,
     location_code: Annotated[str, Query(max_length=128, description=f'{DOC_LOCATION_CODE} {DOC_SEE_LOC}')] = None,
     location_name: Annotated[str, Query(max_length=512, description=f'{DOC_LOCATION_NAME} {DOC_SEE_LOC}')] = None,
     output_format: OutputFormat = OutputFormat.JSON,
 ):
+    ref_period_parameters = None
     result = await get_admin2_srv(
-        pagination_parameters=pagination_parameters,
+        pagination_parameters=common_parameters,
+        ref_period_parameters=ref_period_parameters,
         db=db,
         code=code,
         name=name,
@@ -130,6 +183,6 @@ async def get_admin2(


 get_admin2.__doc__ = (
-    'Not all data are available for all locations. Learn more about the scope of data coverage in HAPI in '
+    'Not all data are available for all locations. Learn more about the scope of data coverage in HDX HAPI in '
     f'the Overview and Getting Started documentation.'
 )
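The endpoints above all swap the old `pagination_parameters` dict for a shared `CommonEndpointParams` object resolved by `common_endpoint_parameters`. That dependency's definition lives in `hdx_hapi/endpoints/util/util.py` and is not part of this patch; the sketch below only illustrates the general FastAPI pattern, and every field name in it is a guess.

```python
# Illustrative shape of the shared dependency; the real definition in
# hdx_hapi/endpoints/util/util.py may differ. Field names are assumptions.
from dataclasses import dataclass
from typing import Annotated

from fastapi import Query


@dataclass
class CommonEndpointParams:
    offset: int
    limit: int


async def common_endpoint_parameters(
    offset: Annotated[int, Query(ge=0, description='Number of rows to skip')] = 0,
    limit: Annotated[int, Query(gt=0, le=10000, description='Maximum rows to return')] = 10000,
) -> CommonEndpointParams:
    # FastAPI resolves this coroutine once per request and injects the result
    # wherever Depends(common_endpoint_parameters) appears in a handler signature.
    return CommonEndpointParams(offset=offset, limit=limit)
```

A typed object like this gives the service layer one stable signature instead of a loosely keyed dict, which is presumably why the refactor touches every handler.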
diff --git a/hdx_hapi/endpoints/get_affected_people.py b/hdx_hapi/endpoints/get_affected_people.py
new file mode 100644
index 00000000..181b8abf
--- /dev/null
+++ b/hdx_hapi/endpoints/get_affected_people.py
@@ -0,0 +1,199 @@
+from typing import Annotated, Optional
+from fastapi import Depends, Query, APIRouter
+# from pydantic import NaiveDatetime
+
+
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from hdx_hapi.config.doc_snippets import (
+    DOC_GENDER,
+    DOC_AGE_RANGE,
+    DOC_POPULATION_GROUP,
+    DOC_POPULATION_STATUS,
+    DOC_SECTOR_CODE,
+    DOC_SECTOR_NAME,
+    DOC_ADMIN1_REF,
+    DOC_ADMIN1_CODE,
+    DOC_ADMIN2_REF,
+    DOC_ADMIN2_NAME,
+    DOC_ADMIN2_CODE,
+    DOC_LOCATION_REF,
+    DOC_LOCATION_CODE,
+    DOC_LOCATION_NAME,
+    DOC_SEE_ADMIN1,
+    DOC_SEE_LOC,
+    DOC_SEE_ADMIN2,
+)
+
+from hdx_hapi.endpoints.models.base import HapiGenericResponse
+from hdx_hapi.endpoints.models.humanitarian_needs import HumanitarianNeedsResponse
+from hdx_hapi.endpoints.models.refugees import RefugeesResponse
+from hdx_hapi.services.csv_transform_logic import transform_result_to_csv_stream_if_requested
+from hdx_hapi.services.humanitarian_needs_logic import get_humanitarian_needs_srv
+from hdx_hapi.services.refugees_logic import get_refugees_srv
+from hdx_hapi.services.sql_alchemy_session import get_db
+from hapi_schema.utils.enums import DisabledMarker, Gender, PopulationGroup, PopulationStatus
+from hdx_hapi.endpoints.util.util import (
+    CommonEndpointParams,
+    OutputFormat,
+    # ReferencePeriodParameters,
+    common_endpoint_parameters,
+    # reference_period_parameters,
+    AdminLevel,
+)
+
+router = APIRouter(
+    tags=['Affected people'],
+)
+
+
+@router.get(
+    '/api/affected-people/humanitarian-needs',
+    response_model=HapiGenericResponse[HumanitarianNeedsResponse],
+    summary='Get humanitarian needs data',
+    include_in_schema=False,
+)
+@router.get(
+    '/api/v1/affected-people/humanitarian-needs',
+    response_model=HapiGenericResponse[HumanitarianNeedsResponse],
+    summary='Get humanitarian needs data',
+)
+async def get_humanitarian_needs(
+    # ref_period_parameters: Annotated[ReferencePeriodParameters, Depends(reference_period_parameters)],
+    common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
+    db: AsyncSession = Depends(get_db),
+    gender: Annotated[Optional[Gender], Query(max_length=3, description=f'{DOC_GENDER}')] = None,
+    age_range: Annotated[Optional[str], Query(max_length=32, description=f'{DOC_AGE_RANGE}')] = None,
+    disabled_marker: Annotated[Optional[DisabledMarker], Query(description='Disabled marker')] = None,
+    sector_code: Annotated[Optional[str], Query(max_length=32, description=f'{DOC_SECTOR_CODE}')] = None,
+    population_group: Annotated[
+        Optional[PopulationGroup], Query(max_length=32, description=f'{DOC_POPULATION_GROUP}')
+    ] = None,
+    population_status: Annotated[
+        Optional[PopulationStatus], Query(max_length=32, description=f'{DOC_POPULATION_STATUS}')
+    ] = None,
+    population_min: Annotated[int, Query(description='Population, minimum value for filter')] = None,
+    population_max: Annotated[int, Query(description='Population, maximum value for filter')] = None,
+    # reference_period_start: Annotated[
+    #     NaiveDatetime | date,
+    #     Query(description='Reference period start', openapi_examples={'2020-01-01': {'value': '2020-01-01'}}),
+    # ] = None,
+    # reference_period_end: Annotated[
+    #     NaiveDatetime | date,
+    #     Query(description='Reference period end', openapi_examples={'2024-12-31': {'value': '2024-12-31'}}),
+    # ] = None,
+    sector_name: Annotated[Optional[str], Query(max_length=512, description=f'{DOC_SECTOR_NAME}')] = None,
+    location_code: Annotated[
+        Optional[str], Query(max_length=128, description=f'{DOC_LOCATION_CODE} {DOC_SEE_LOC}')
+    ] = None,
+    location_name: Annotated[
+        Optional[str], Query(max_length=512, description=f'{DOC_LOCATION_NAME} {DOC_SEE_LOC}')
+    ] = None,
+    location_ref: Annotated[Optional[int], Query(description=f'{DOC_LOCATION_REF}')] = None,
+    admin1_code: Annotated[
+        Optional[str], Query(max_length=128, description=f'{DOC_ADMIN1_CODE} {DOC_SEE_ADMIN1}')
+    ] = None,
+    admin2_ref: Annotated[Optional[int], Query(description=f'{DOC_ADMIN2_REF}')] = None,
+    admin2_code: Annotated[
+        Optional[str], Query(max_length=128, description=f'{DOC_ADMIN2_CODE} {DOC_SEE_ADMIN2}')
+    ] = None,
+    admin2_name: Annotated[
+        Optional[str], Query(max_length=512, description=f'{DOC_ADMIN2_NAME} {DOC_SEE_ADMIN2}')
+    ] = None,
+    admin1_ref: Annotated[Optional[int], Query(description=f'{DOC_ADMIN1_REF}')] = None,
+    admin_level: Annotated[Optional[AdminLevel], Query(description='Filter the response by admin level')] = None,
+    output_format: OutputFormat = OutputFormat.JSON,
+):
+    """
+    UNOCHA's Humanitarian Needs data, based on the Joint and Intersectoral Analysis Framework (JIAF),
+    provides information about the number of people in need during a crisis.
+    See the more detailed technical HDX HAPI documentation,
+    and the original JIAF source website.
+    """
+    ref_period_parameters = None
+    result = await get_humanitarian_needs_srv(
+        pagination_parameters=common_parameters,
+        ref_period_parameters=ref_period_parameters,
+        db=db,
+        admin2_ref=admin2_ref,
+        gender=gender,
+        age_range=age_range,
+        disabled_marker=disabled_marker,
+        sector_code=sector_code,
+        population_group=population_group,
+        population_status=population_status,
+        population_min=population_min,
+        population_max=population_max,
+        sector_name=sector_name,
+        location_code=location_code,
+        location_name=location_name,
+        location_ref=location_ref,
+        admin1_code=admin1_code,
+        admin2_code=admin2_code,
+        admin2_name=admin2_name,
+        admin1_ref=admin1_ref,
+        admin_level=admin_level,
+    )
+    return transform_result_to_csv_stream_if_requested(result, output_format, HumanitarianNeedsResponse)
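Because the filters above are now typed against `hapi_schema` enums, an invalid value is rejected with a 422 before any query runs. A hedged client-side sketch follows; the port comes from the local dev setup, while the literal enum spellings are assumptions.

```python
# Hypothetical call against a local instance (port 8844 per the dev setup).
# The 'gender' and 'disabled_marker' values are guesses at valid enum spellings.
import requests

response = requests.get(
    'http://localhost:8844/api/v1/affected-people/humanitarian-needs',
    params={
        'gender': 'f',           # must be a valid Gender enum value
        'disabled_marker': 'y',  # must be a valid DisabledMarker enum value
        'population_min': 1000,
        'admin_level': 2,
    },
)
print(response.status_code)  # 422 here means a filter value failed enum validation
print(response.json())
```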
+
+
+## refugees
+
+
+@router.get(
+    '/api/affected-people/refugees',
+    response_model=HapiGenericResponse[RefugeesResponse],
+    summary='Get refugees data',
+    include_in_schema=False,
+)
+@router.get(
+    '/api/v1/affected-people/refugees',
+    response_model=HapiGenericResponse[RefugeesResponse],
+    summary='Get refugees data',
+)
+async def get_refugees(
+    # ref_period_parameters: Annotated[ReferencePeriodParameters, Depends(reference_period_parameters)],
+    common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
+    db: AsyncSession = Depends(get_db),
+    population_group: Annotated[
+        Optional[PopulationGroup], Query(max_length=32, description=f'{DOC_POPULATION_GROUP}')
+    ] = None,
+    population_min: Annotated[int, Query(description='Population, minimum value for filter')] = None,
+    population_max: Annotated[int, Query(description='Population, maximum value for filter')] = None,
+    gender: Annotated[Optional[Gender], Query(max_length=3, description=f'{DOC_GENDER}')] = None,
+    age_range: Annotated[Optional[str], Query(max_length=32, description=f'{DOC_AGE_RANGE}')] = None,
+    origin_location_code: Annotated[
+        Optional[str], Query(max_length=128, description=f'{DOC_LOCATION_CODE} {DOC_SEE_LOC}')
+    ] = None,
+    origin_location_name: Annotated[
+        Optional[str], Query(max_length=512, description=f'{DOC_LOCATION_NAME} {DOC_SEE_LOC}')
+    ] = None,
+    asylum_location_code: Annotated[
+        Optional[str], Query(max_length=128, description=f'{DOC_LOCATION_CODE} {DOC_SEE_LOC}')
+    ] = None,
+    asylum_location_name: Annotated[
+        Optional[str], Query(max_length=512, description=f'{DOC_LOCATION_NAME} {DOC_SEE_LOC}')
+    ] = None,
+    output_format: OutputFormat = OutputFormat.JSON,
+):
+    """
+    UNHCR's Refugee data provides information about displaced people in a crisis.
+    See the more detailed technical HDX HAPI documentation,
+    and the original HDX source website.
+    """
+    ref_period_parameters = None
+    result = await get_refugees_srv(
+        pagination_parameters=common_parameters,
+        ref_period_parameters=ref_period_parameters,
+        db=db,
+        population_group=population_group,
+        population_min=population_min,
+        population_max=population_max,
+        gender=gender,
+        age_range=age_range,
+        origin_location_code=origin_location_code,
+        origin_location_name=origin_location_name,
+        asylum_location_code=asylum_location_code,
+        asylum_location_name=asylum_location_name,
+    )
+    return transform_result_to_csv_stream_if_requested(result, output_format, RefugeesResponse)
diff --git a/hdx_hapi/endpoints/get_conflict_events.py b/hdx_hapi/endpoints/get_conflict_events.py
new file mode 100644
index 00000000..d92b2ece
--- /dev/null
+++ b/hdx_hapi/endpoints/get_conflict_events.py
@@ -0,0 +1,108 @@
+from typing import Annotated, Optional
+from fastapi import APIRouter, Depends, Query
+
+from hapi_schema.utils.enums import EventType
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from hdx_hapi.config.doc_snippets import (
+    DOC_ADMIN1_REF,
+    DOC_ADMIN1_CODE,
+    DOC_ADMIN1_NAME,
+    DOC_ADMIN2_REF,
+    DOC_ADMIN2_CODE,
+    DOC_ADMIN2_NAME,
+    DOC_LOCATION_REF,
+    DOC_LOCATION_CODE,
+    DOC_LOCATION_NAME,
+    DOC_SEE_ADMIN1,
+    DOC_SEE_ADMIN2,
+    DOC_SEE_LOC,
+)
+from hdx_hapi.endpoints.models.base import HapiGenericResponse
+from hdx_hapi.endpoints.models.conflict_event import ConflictEventResponse
+from hdx_hapi.endpoints.util.util import (
+    AdminLevel,
+    CommonEndpointParams,
+    OutputFormat,
+    # ReferencePeriodParameters,
+    common_endpoint_parameters,
+    # reference_period_parameters,
+)
+from hdx_hapi.services.conflict_view_logic import get_conflict_event_srv
+from hdx_hapi.services.csv_transform_logic import transform_result_to_csv_stream_if_requested
+from hdx_hapi.services.sql_alchemy_session import get_db
+
+
+router = APIRouter(
+    tags=['Coordination & Context'],
+)
+
+SUMMARY_TEXT = 'Get the list of conflict events'
+
+
+@router.get(
+    '/api/coordination-context/conflict-event',
+    response_model=HapiGenericResponse[ConflictEventResponse],
+    summary=SUMMARY_TEXT,
+    include_in_schema=False,
+)
+@router.get(
+    '/api/v1/coordination-context/conflict-event',
+    response_model=HapiGenericResponse[ConflictEventResponse],
+    summary=SUMMARY_TEXT,
+)
+async def get_conflict_events(
+    # ref_period_parameters: Annotated[ReferencePeriodParameters, Depends(reference_period_parameters)],
+    common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
+    db: AsyncSession = Depends(get_db),
+    event_type: Annotated[
+        Optional[EventType],
+        Query(description='Event type, one of civilian_targeting, demonstration, political_violence'),
+    ] = None,
+    location_ref: Annotated[Optional[int], Query(description=f'{DOC_LOCATION_REF}')] = None,
+    location_code: Annotated[
+        Optional[str], Query(max_length=128, description=f'{DOC_LOCATION_CODE} {DOC_SEE_LOC}')
+    ] = None,
+    location_name: Annotated[
+        Optional[str], Query(max_length=512, description=f'{DOC_LOCATION_NAME} {DOC_SEE_LOC}')
+    ] = None,
+    admin1_ref: Annotated[Optional[int], Query(description=f'{DOC_ADMIN1_REF}')] = None,
+    admin1_code: Annotated[
+        Optional[str], Query(max_length=128, description=f'{DOC_ADMIN1_CODE} {DOC_SEE_ADMIN1}')
+    ] = None,
+    admin1_name: Annotated[
+        Optional[str], Query(max_length=512, description=f'{DOC_ADMIN1_NAME} {DOC_SEE_ADMIN1}')
+    ] = None,
+    admin2_ref: Annotated[Optional[int], Query(description=f'{DOC_ADMIN2_REF}')] = None,
+    admin2_code: Annotated[
+        Optional[str], Query(max_length=128, description=f'{DOC_ADMIN2_CODE} {DOC_SEE_ADMIN2}')
+    ] = None,
+    admin2_name: Annotated[
+        Optional[str], Query(max_length=512, description=f'{DOC_ADMIN2_NAME} {DOC_SEE_ADMIN2}')
+    ] = None,
+    admin_level: Annotated[AdminLevel, Query(description='Filter the response by admin level')] = None,
+    output_format: OutputFormat = OutputFormat.JSON,
+):
+    """
+    Armed Conflict Location & Events Data from ACLED.
+    See the more detailed technical HDX HAPI documentation,
+    and the original ACLED source website.
+    """
+    ref_period_parameters = None
+    result = await get_conflict_event_srv(
+        pagination_parameters=common_parameters,
+        ref_period_parameters=ref_period_parameters,
+        db=db,
+        event_type=event_type,
+        location_ref=location_ref,
+        location_code=location_code,
+        location_name=location_name,
+        admin1_ref=admin1_ref,
+        admin1_code=admin1_code,
+        admin1_name=admin1_name,
+        admin2_ref=admin2_ref,
+        admin2_code=admin2_code,
+        admin2_name=admin2_name,
+        admin_level=admin_level,
+    )
+    return transform_result_to_csv_stream_if_requested(result, output_format, ConflictEventResponse)
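The `event_type` description above enumerates the accepted string values, so a client can pass them directly; a minimal sketch, assuming a local server and using only values quoted in the description.

```python
# The filter values come from the Query description above
# ('civilian_targeting', 'demonstration', 'political_violence');
# the host/port are the assumed local dev server.
import requests

resp = requests.get(
    'http://localhost:8844/api/v1/coordination-context/conflict-event',
    params={'event_type': 'political_violence', 'location_code': 'AFG'},
)
resp.raise_for_status()
print(resp.json())
```

The same strings back the `EventType` enum imported from `hapi_schema.utils.enums`, so code that has that package installed can pass an enum member's value instead of a raw string.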
diff --git a/hdx_hapi/endpoints/get_currency.py b/hdx_hapi/endpoints/get_currency.py
new file mode 100644
index 00000000..41309710
--- /dev/null
+++ b/hdx_hapi/endpoints/get_currency.py
@@ -0,0 +1,52 @@
+from typing import Annotated
+from fastapi import Depends, Query, APIRouter
+
+
+from sqlalchemy.ext.asyncio import AsyncSession
+from hdx_hapi.config.doc_snippets import DOC_CURRENCY_CODE
+
+from hdx_hapi.endpoints.models.base import HapiGenericResponse
+from hdx_hapi.endpoints.models.currency import CurrencyResponse
+from hdx_hapi.endpoints.util.util import (
+    CommonEndpointParams,
+    OutputFormat,
+    common_endpoint_parameters,
+)
+from hdx_hapi.services.csv_transform_logic import transform_result_to_csv_stream_if_requested
+
+from hdx_hapi.services.currency_logic import get_currencies_srv
+from hdx_hapi.services.sql_alchemy_session import get_db
+
+router = APIRouter(
+    tags=['Metadata'],
+)
+
+
+@router.get(
+    '/api/metadata/currency',
+    response_model=HapiGenericResponse[CurrencyResponse],
+    summary='Get information about how currencies are classified',
+    include_in_schema=False,
+)
+@router.get(
+    '/api/v1/metadata/currency',
+    response_model=HapiGenericResponse[CurrencyResponse],
+    summary='Get information about how currencies are classified',
+)
+async def get_currencies(
+    common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
+    db: AsyncSession = Depends(get_db),
+    code: Annotated[
+        str, Query(max_length=32, description=f'{DOC_CURRENCY_CODE}', openapi_examples={'usd': {'value': 'usd'}})
+    ] = None,
+    output_format: OutputFormat = OutputFormat.JSON,
+):
+    """
+    Provide currency information to use in conjunction with the food-prices endpoint
+    """
+    result = await get_currencies_srv(
+        pagination_parameters=common_parameters,
+        db=db,
+        code=code,
+    )
+    return transform_result_to_csv_stream_if_requested(result, output_format, CurrencyResponse)
diff --git a/hdx_hapi/endpoints/get_demographic.py b/hdx_hapi/endpoints/get_demographic.py
index 8b45cde5..e69de29b 100644
--- a/hdx_hapi/endpoints/get_demographic.py
+++ b/hdx_hapi/endpoints/get_demographic.py
@@ -1,73 +0,0 @@
-from typing import List, Annotated
-from fastapi import Depends, Query, APIRouter
-
-
-from sqlalchemy.ext.asyncio import AsyncSession
-from hdx_hapi.config.doc_snippets import (
-    DOC_AGE_RANGE_CODE,
-    DOC_AGE_RANGE_SUMMARY,
-    DOC_GENDER_CODE,
-    DOC_GENDER_DESCRIPTION,
-    DOC_GENDER_SUMMARY,
-)
-
-from hdx_hapi.endpoints.models.demographic import AgeRangeResponse, GenderResponse
-from hdx_hapi.endpoints.util.util import pagination_parameters, OutputFormat
-from hdx_hapi.services.age_range_logic import get_age_ranges_srv
-from hdx_hapi.services.csv_transform_logic import transform_result_to_csv_stream_if_requested
-from hdx_hapi.services.gender_logic import get_genders_srv
-from hdx_hapi.services.sql_alchemy_session import get_db
-
-
-router = APIRouter(
-    tags=['Age and Gender Disaggregations'],
-)
-
-
-@router.get(
-    '/api/age_range', response_model=List[AgeRangeResponse], summary=f'{DOC_AGE_RANGE_SUMMARY}', include_in_schema=False
-)
-@router.get('/api/v1/age_range', response_model=List[AgeRangeResponse], summary=f'{DOC_AGE_RANGE_SUMMARY}')
-async def get_age_ranges(
-    pagination_parameters: Annotated[dict, Depends(pagination_parameters)],
-    db: AsyncSession = Depends(get_db),
-    code: Annotated[
-        str, Query(max_length=32, openapi_examples={'20-24': {'value': '20-24'}}, description=f'{DOC_AGE_RANGE_CODE}')
-    ] = None,
-    output_format: OutputFormat = OutputFormat.JSON,
-):
-    """Get the list of age ranges used for disaggregating population data. Age ranges are not standardized across
-    different data sources and instead reflect the age range breakdowns provided by the data source.
- """ - result = await get_age_ranges_srv( - pagination_parameters=pagination_parameters, - db=db, - code=code, - ) - - return transform_result_to_csv_stream_if_requested(result, output_format, AgeRangeResponse) - - -@router.get( - '/api/gender', response_model=List[GenderResponse], summary=f'{DOC_GENDER_SUMMARY}', include_in_schema=False -) -@router.get('/api/v1/gender', response_model=List[GenderResponse], summary=f'{DOC_GENDER_SUMMARY}') -async def get_genders( - pagination_parameters: Annotated[dict, Depends(pagination_parameters)], - db: AsyncSession = Depends(get_db), - code: Annotated[ - str, Query(max_length=1, description=f'{DOC_GENDER_CODE}', openapi_examples={'f': {'value': 'f'}}) - ] = None, - description: Annotated[ - str, - Query( - max_length=256, description=f'{DOC_GENDER_DESCRIPTION}', openapi_examples={'female': {'value': 'female'}} - ), - ] = None, - output_format: OutputFormat = OutputFormat.JSON, -): - """ """ - result = await get_genders_srv( - pagination_parameters=pagination_parameters, db=db, code=code, description=description - ) - return transform_result_to_csv_stream_if_requested(result, output_format, GenderResponse) diff --git a/hdx_hapi/endpoints/get_encoded_identifier.py b/hdx_hapi/endpoints/get_encoded_identifier.py index 183d1769..d18764cc 100644 --- a/hdx_hapi/endpoints/get_encoded_identifier.py +++ b/hdx_hapi/endpoints/get_encoded_identifier.py @@ -1,38 +1,37 @@ import base64 from typing import Annotated -from fastapi import APIRouter, Query +from fastapi import APIRouter from pydantic import EmailStr -from hdx_hapi.services.csv_transform_logic import transform_result_to_csv_stream_if_requested from hdx_hapi.endpoints.models.encoded_identifier import IdentifierResponse -from hdx_hapi.endpoints.util.util import OutputFormat +from hdx_hapi.endpoints.util.util import app_name_identifier_query, email_identifier_query router = APIRouter( - tags=['Utility'], + tags=['Generate App Identifier'], ) -SUMMARY = 'Get an encoded application name plus email' +SUMMARY = 'Get an app identifier by encoding an application name and email' @router.get( - '/api/encode_identifier', + '/api/encode_app_identifier', response_model=IdentifierResponse, summary=SUMMARY, include_in_schema=False, ) @router.get( - '/api/v1/encode_identifier', + '/api/v1/encode_app_identifier', response_model=IdentifierResponse, summary=SUMMARY, ) async def get_encoded_identifier( - application: Annotated[str, Query(max_length=512, description='A name for the calling application')] = None, - email: Annotated[EmailStr, Query(max_length=512, description='An email address')] = None, + application: Annotated[str, app_name_identifier_query], + email: Annotated[EmailStr, email_identifier_query], ): """ - Encode an application name and email address in base64 to serve as an client identifier in HAPI calls + Encode an application name and email address in base64 to serve as an client identifier in HDX HAPI calls """ encoded_identifier = base64.b64encode(bytes(f'{application}:{email}', 'utf-8')) - result = {'encoded_identifier': encoded_identifier.decode('utf-8')} - return transform_result_to_csv_stream_if_requested(result, OutputFormat.JSON, IdentifierResponse) + result = {'encoded_app_identifier': encoded_identifier.decode('utf-8')} + return result diff --git a/hdx_hapi/endpoints/get_food_price.py b/hdx_hapi/endpoints/get_food_price.py new file mode 100644 index 00000000..17d1cab7 --- /dev/null +++ b/hdx_hapi/endpoints/get_food_price.py @@ -0,0 +1,117 @@ +from decimal import Decimal +from typing import 
diff --git a/hdx_hapi/endpoints/get_food_price.py b/hdx_hapi/endpoints/get_food_price.py
new file mode 100644
index 00000000..17d1cab7
--- /dev/null
+++ b/hdx_hapi/endpoints/get_food_price.py
@@ -0,0 +1,117 @@
+from decimal import Decimal
+from typing import Annotated, Optional
+from fastapi import Depends, Query, APIRouter
+
+from hapi_schema.utils.enums import CommodityCategory, PriceFlag, PriceType
+from sqlalchemy.ext.asyncio import AsyncSession
+from hdx_hapi.config.doc_snippets import (
+    DOC_ADMIN1_REF,
+    DOC_ADMIN1_CODE,
+    DOC_ADMIN1_NAME,
+    DOC_ADMIN2_REF,
+    DOC_ADMIN2_CODE,
+    DOC_ADMIN2_NAME,
+    DOC_LOCATION_REF,
+    DOC_LOCATION_CODE,
+    DOC_LOCATION_NAME,
+    DOC_SEE_ADMIN1,
+    DOC_SEE_ADMIN2,
+    DOC_SEE_LOC,
+)
+
+from hdx_hapi.endpoints.models.base import HapiGenericResponse
+from hdx_hapi.endpoints.models.food_price import FoodPriceResponse
+from hdx_hapi.endpoints.util.util import (
+    AdminLevel,
+    CommonEndpointParams,
+    OutputFormat,
+    common_endpoint_parameters,
+)
+from hdx_hapi.services.csv_transform_logic import transform_result_to_csv_stream_if_requested
+from hdx_hapi.services.food_price_logic import get_food_prices_srv
+from hdx_hapi.services.sql_alchemy_session import get_db
+
+router = APIRouter(
+    tags=['Food Security & Nutrition'],
+)
+
+SUMMARY_TEXT = 'Get food prices.'
+
+
+@router.get(
+    '/api/food/food-price',
+    response_model=HapiGenericResponse[FoodPriceResponse],
+    summary=SUMMARY_TEXT,
+    include_in_schema=False,
+)
+@router.get(
+    '/api/v1/food/food-price',
+    response_model=HapiGenericResponse[FoodPriceResponse],
+    summary=SUMMARY_TEXT,
+)
+async def get_food_prices(
+    common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
+    db: AsyncSession = Depends(get_db),
+    market_code: Annotated[Optional[str], Query(max_length=32, description='Market code')] = None,
+    market_name: Annotated[Optional[str], Query(max_length=512, description='Market name')] = None,
+    commodity_code: Annotated[Optional[str], Query(max_length=32, description='Commodity code')] = None,
+    commodity_category: Annotated[Optional[CommodityCategory], Query(description='Commodity category')] = None,
+    commodity_name: Annotated[Optional[str], Query(max_length=512, description='Commodity name')] = None,
+    price_flag: Annotated[Optional[PriceFlag], Query(description='Price Flag')] = None,
+    price_type: Annotated[Optional[PriceType], Query(description='Price Type')] = None,
+    price_min: Annotated[Optional[Decimal], Query(description='Price, lower bound')] = None,
+    price_max: Annotated[Optional[Decimal], Query(description='Price, upper bound')] = None,
+    location_ref: Annotated[Optional[int], Query(description=f'{DOC_LOCATION_REF}')] = None,
+    location_code: Annotated[
+        Optional[str], Query(max_length=128, description=f'{DOC_LOCATION_CODE} {DOC_SEE_LOC}')
+    ] = None,
+    location_name: Annotated[
+        Optional[str], Query(max_length=512, description=f'{DOC_LOCATION_NAME} {DOC_SEE_LOC}')
+    ] = None,
+    admin1_ref: Annotated[Optional[int], Query(description=f'{DOC_ADMIN1_REF}')] = None,
+    admin1_code: Annotated[
+        Optional[str], Query(max_length=128, description=f'{DOC_ADMIN1_CODE} {DOC_SEE_ADMIN1}')
+    ] = None,
+    admin1_name: Annotated[
+        Optional[str], Query(max_length=512, description=f'{DOC_ADMIN1_NAME} {DOC_SEE_ADMIN1}')
+    ] = None,
+    admin2_ref: Annotated[Optional[int], Query(description=f'{DOC_ADMIN2_REF}')] = None,
+    admin2_code: Annotated[
+        Optional[str], Query(max_length=128, description=f'{DOC_ADMIN2_CODE} {DOC_SEE_ADMIN2}')
+    ] = None,
+    admin2_name: Annotated[
+        Optional[str], Query(max_length=512, description=f'{DOC_ADMIN2_NAME} {DOC_SEE_ADMIN2}')
+    ] = None,
+    admin_level: Annotated[Optional[AdminLevel], Query(description='Filter the response by admin level')] = None,
+    output_format: OutputFormat = OutputFormat.JSON,
+):
+    """
+    The World Food Programme (WFP) food prices data provides information about food prices for a range of commodities
+    at markets across the world. See the more detailed technical
+    HDX HAPI documentation,
+    and the original WFP source website.
+    """
+    result = await get_food_prices_srv(
+        pagination_parameters=common_parameters,
+        db=db,
+        market_code=market_code,
+        market_name=market_name,
+        commodity_code=commodity_code,
+        commodity_category=commodity_category,
+        commodity_name=commodity_name,
+        price_flag=price_flag,
+        price_type=price_type,
+        price_min=price_min,
+        price_max=price_max,
+        location_code=location_code,
+        location_name=location_name,
+        admin1_code=admin1_code,
+        admin1_name=admin1_name,
+        location_ref=location_ref,
+        admin2_ref=admin2_ref,
+        admin2_code=admin2_code,
+        admin2_name=admin2_name,
+        admin1_ref=admin1_ref,
+        admin_level=admin_level,
+    )
+    return transform_result_to_csv_stream_if_requested(result, output_format, FoodPriceResponse)
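Like every handler that ends in `transform_result_to_csv_stream_if_requested`, this endpoint can return CSV instead of JSON when `output_format=csv` is passed. A sketch of consuming it that way; the server address is the assumed local dev instance and the column names are guesses based on the filter names above.

```python
# Reading the food-price endpoint as CSV; only `output_format` differs from
# the JSON calls. Column names ('market_name', 'price') are assumptions.
import csv
import io

import requests

resp = requests.get(
    'http://localhost:8844/api/v1/food/food-price',
    params={'commodity_name': 'Rice', 'output_format': 'csv'},
)
resp.raise_for_status()
for row in csv.DictReader(io.StringIO(resp.text)):
    print(row.get('market_name'), row.get('price'))
```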
diff --git a/hdx_hapi/endpoints/get_food_security.py b/hdx_hapi/endpoints/get_food_security.py
index 9899d493..538380ea 100644
--- a/hdx_hapi/endpoints/get_food_security.py
+++ b/hdx_hapi/endpoints/get_food_security.py
@@ -1,77 +1,96 @@
-from datetime import date
-from typing import List, Annotated
+from typing import Annotated
 from fastapi import Depends, Query, APIRouter
-from pydantic import NaiveDatetime

 from sqlalchemy.ext.asyncio import AsyncSession
-
+from hapi_schema.utils.enums import IPCPhase, IPCType
 from hdx_hapi.config.doc_snippets import (
+    DOC_LOCATION_REF,
     DOC_LOCATION_CODE,
     DOC_LOCATION_NAME,
     DOC_SEE_LOC,
-    DOC_UPDATE_DATE_MAX,
-    DOC_UPDATE_DATE_MIN,
+    DOC_ADMIN1_REF,
+    DOC_ADMIN1_CODE,
+    DOC_ADMIN1_NAME,
+    DOC_ADMIN2_REF,
+    DOC_ADMIN2_CODE,
+    DOC_ADMIN2_NAME,
+    DOC_SEE_ADMIN1,
+    DOC_SEE_ADMIN2,
 )

+from hdx_hapi.endpoints.models.base import HapiGenericResponse
 from hdx_hapi.endpoints.models.food_security import FoodSecurityResponse
-from hdx_hapi.endpoints.util.util import AdminLevel, OutputFormat, pagination_parameters
+from hdx_hapi.endpoints.util.util import (
+    CommonEndpointParams,
+    OutputFormat,
+    # ReferencePeriodParameters,
+    common_endpoint_parameters,
+    # reference_period_parameters,
+    AdminLevel,
+)
 from hdx_hapi.services.csv_transform_logic import transform_result_to_csv_stream_if_requested
 from hdx_hapi.services.food_security_logic import get_food_security_srv
 from hdx_hapi.services.sql_alchemy_session import get_db

 router = APIRouter(
-    tags=['Food Security'],
+    tags=['Food Security & Nutrition'],
 )


 @router.get(
-    '/api/themes/food_security',
-    response_model=List[FoodSecurityResponse],
+    '/api/food/food-security',
+    response_model=HapiGenericResponse[FoodSecurityResponse],
     summary='Get food security data',
     include_in_schema=False,
 )
-@router.get('/api/v1/themes/food_security', response_model=List[FoodSecurityResponse], summary='Get food security data')
+@router.get(
+    '/api/v1/food/food-security',
+    response_model=HapiGenericResponse[FoodSecurityResponse],
+    summary='Get food security data',
+)
 async def get_food_security(
-    pagination_parameters: Annotated[dict, Depends(pagination_parameters)],
+    common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
+    # ref_period_parameters: Annotated[ReferencePeriodParameters, Depends(reference_period_parameters)],
     db: AsyncSession = Depends(get_db),
-    ipc_phase_code: Annotated[str, Query(max_length=32, description='IPC phase code')] = None,
-    ipc_type_code: Annotated[str, Query(max_length=32, description='IPC type code')] = None,
-    dataset_hdx_provider_stub: Annotated[str, Query(max_length=128, description='Organization(provider) code')] = None,
-    resource_update_date_min: Annotated[
-        NaiveDatetime | date,
-        Query(description=f'{DOC_UPDATE_DATE_MIN}', openapi_examples={'2020-01-01': {'value': '2020-01-01'}}),
-    ] = None,
-    resource_update_date_max: Annotated[
-        NaiveDatetime | date,
-        Query(description=f'{DOC_UPDATE_DATE_MAX}', openapi_examples={'2024-12-31': {'value': '2024-12-31'}}),
-    ] = None,
+    ipc_phase: Annotated[IPCPhase, Query(description='IPC Phase')] = None,
+    ipc_type: Annotated[IPCType, Query(description='IPC Type')] = None,
+    location_ref: Annotated[int, Query(description=f'{DOC_LOCATION_REF}')] = None,
     location_code: Annotated[str, Query(max_length=128, description=f'{DOC_LOCATION_CODE} {DOC_SEE_LOC}')] = None,
     location_name: Annotated[str, Query(max_length=512, description=f'{DOC_LOCATION_NAME} {DOC_SEE_LOC}')] = None,
-    admin1_name: Annotated[str, Query(max_length=512, description='Admin1 name')] = None,
-    admin1_code: Annotated[str, Query(max_length=128, description='Admin1 code')] = None,
-    admin2_name: Annotated[str, Query(max_length=512, description='Admin2 name')] = None,
-    admin2_code: Annotated[str, Query(max_length=128, description='Admin2 code')] = None,
+    admin1_ref: Annotated[int, Query(description=f'{DOC_ADMIN1_REF}')] = None,
+    admin1_code: Annotated[str, Query(max_length=128, description=f'{DOC_ADMIN1_CODE} {DOC_SEE_ADMIN1}')] = None,
+    admin1_name: Annotated[str, Query(max_length=512, description=f'{DOC_ADMIN1_NAME} {DOC_SEE_ADMIN1}')] = None,
+    # admin1_is_unspecified: Annotated[bool, Query(description='Location Adm1 is not specified')] = None,
+    admin2_ref: Annotated[int, Query(description=f'{DOC_ADMIN2_REF}')] = None,
+    admin2_code: Annotated[str, Query(max_length=128, description=f'{DOC_ADMIN2_CODE} {DOC_SEE_ADMIN2}')] = None,
+    admin2_name: Annotated[str, Query(max_length=512, description=f'{DOC_ADMIN2_NAME} {DOC_SEE_ADMIN2}')] = None,
     admin_level: Annotated[AdminLevel, Query(description='Filter the response by admin level')] = None,
+    # admin2_is_unspecified: Annotated[bool, Query(description='Is admin2 specified or not')] = None,
     output_format: OutputFormat = OutputFormat.JSON,
 ):
     """
-    Return the list of food security data
+    Integrated Food Security Phase Classification from the IPC.
+    See the more detailed technical HDX HAPI documentation,
+    and the
+    original IPC source website.
""" + ref_period_parameters = None result = await get_food_security_srv( - pagination_parameters=pagination_parameters, + pagination_parameters=common_parameters, + ref_period_parameters=ref_period_parameters, db=db, - ipc_phase_code=ipc_phase_code, - ipc_type_code=ipc_type_code, - dataset_hdx_provider_stub=dataset_hdx_provider_stub, - resource_update_date_min=resource_update_date_min, - resource_update_date_max=resource_update_date_max, + ipc_phase=ipc_phase, + ipc_type=ipc_type, location_code=location_code, location_name=location_name, admin1_name=admin1_name, admin1_code=admin1_code, + location_ref=location_ref, + admin2_ref=admin2_ref, admin2_code=admin2_code, admin2_name=admin2_name, + admin1_ref=admin1_ref, admin_level=admin_level, ) return transform_result_to_csv_stream_if_requested(result, output_format, FoodSecurityResponse) diff --git a/hdx_hapi/endpoints/get_funding.py b/hdx_hapi/endpoints/get_funding.py new file mode 100644 index 00000000..75435df9 --- /dev/null +++ b/hdx_hapi/endpoints/get_funding.py @@ -0,0 +1,66 @@ +from typing import Annotated, Optional +from fastapi import APIRouter, Depends, Query + +from sqlalchemy.ext.asyncio import AsyncSession + +from hdx_hapi.config.doc_snippets import DOC_LOCATION_CODE, DOC_LOCATION_NAME, DOC_SEE_LOC +from hdx_hapi.endpoints.models.base import HapiGenericResponse +from hdx_hapi.endpoints.models.funding import FundingResponse +from hdx_hapi.endpoints.util.util import ( + CommonEndpointParams, + OutputFormat, + # ReferencePeriodParameters, + common_endpoint_parameters, + # reference_period_parameters, +) +from hdx_hapi.services.csv_transform_logic import transform_result_to_csv_stream_if_requested +from hdx_hapi.services.funding_logic import get_funding_srv +from hdx_hapi.services.sql_alchemy_session import get_db + + +router = APIRouter( + tags=['Coordination & Context'], +) + + +@router.get( + '/api/coordination-context/funding', + response_model=HapiGenericResponse[FundingResponse], + summary='Get funding data', + include_in_schema=False, +) +@router.get( + '/api/v1/coordination-context/funding', + response_model=HapiGenericResponse[FundingResponse], + summary='Get funding data', +) +async def get_fundings( + # ref_period_parameters: Annotated[ReferencePeriodParameters, Depends(reference_period_parameters)], + common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)], + db: AsyncSession = Depends(get_db), + appeal_code: Annotated[Optional[str], Query(max_length=32, description='Appeal code')] = None, + appeal_type: Annotated[Optional[str], Query(max_length=32, description='Appeal type')] = None, + location_code: Annotated[ + Optional[str], Query(max_length=128, description=f'{DOC_LOCATION_CODE} {DOC_SEE_LOC}') + ] = None, + location_name: Annotated[ + Optional[str], Query(max_length=512, description=f'{DOC_LOCATION_NAME} {DOC_SEE_LOC}') + ] = None, + output_format: OutputFormat = OutputFormat.JSON, +): + """ + UNOCHA's funding data from the Financial Tracking Service provides information on humanitarian aid contributions. + See the more detailed technical HDX HAPI documentation, + and the original FTS source website. 
+ """ + ref_period_parameters = None + result = await get_funding_srv( + pagination_parameters=common_parameters, + ref_period_parameters=ref_period_parameters, + db=db, + appeal_code=appeal_code, + appeal_type=appeal_type, + location_code=location_code, + location_name=location_name, + ) + return transform_result_to_csv_stream_if_requested(result, output_format, FundingResponse) diff --git a/hdx_hapi/endpoints/get_hdx_metadata.py b/hdx_hapi/endpoints/get_hdx_metadata.py index 783607cf..31f2924d 100644 --- a/hdx_hapi/endpoints/get_hdx_metadata.py +++ b/hdx_hapi/endpoints/get_hdx_metadata.py @@ -1,5 +1,5 @@ from datetime import date -from typing import List, Annotated +from typing import Annotated, Optional from fastapi import Depends, Query, APIRouter from pydantic import NaiveDatetime @@ -15,54 +15,62 @@ DOC_HDX_RESOURCE_FORMAT, DOC_HDX_RESOURCE_HXL, DOC_HDX_RESOURCE_ID, + DOC_HDX_DATASET_IN_RESOURCE_ID, + DOC_HDX_DATASET_IN_RESOURCE_NAME, + DOC_HDX_PROVIDER_IN_RESOURCE_STUB, DOC_SEE_DATASET, DOC_UPDATE_DATE_MAX, DOC_UPDATE_DATE_MIN, ) +from hdx_hapi.endpoints.models.base import HapiGenericResponse from hdx_hapi.endpoints.models.hdx_metadata import DatasetResponse, ResourceResponse -from hdx_hapi.endpoints.util.util import OutputFormat, pagination_parameters +from hdx_hapi.endpoints.util.util import ( + CommonEndpointParams, + OutputFormat, + common_endpoint_parameters, +) from hdx_hapi.services.csv_transform_logic import transform_result_to_csv_stream_if_requested from hdx_hapi.services.dataset_logic import get_datasets_srv from hdx_hapi.services.resource_logic import get_resources_srv from hdx_hapi.services.sql_alchemy_session import get_db router = APIRouter( - tags=['HDX Metadata'], + tags=['Metadata'], ) @router.get( - '/api/dataset', - response_model=List[DatasetResponse], - summary='Get information about the sources of the data in HAPI', + '/api/metadata/dataset', + response_model=HapiGenericResponse[DatasetResponse], + summary='Get information about the sources of the data in HDX HAPI', include_in_schema=False, ) @router.get( - '/api/v1/dataset', - response_model=List[DatasetResponse], - summary='Get information about the sources of the data in HAPI', + '/api/v1/metadata/dataset', + response_model=HapiGenericResponse[DatasetResponse], + summary='Get information about the sources of the data in HDX HAPI', ) async def get_datasets( - pagination_parameters: Annotated[dict, Depends(pagination_parameters)], + common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)], db: AsyncSession = Depends(get_db), - hdx_id: Annotated[str, Query(max_length=36, description=f'{DOC_HDX_DATASET_ID}')] = None, - hdx_stub: Annotated[str, Query(max_length=128, description=f'{DOC_HDX_DATASET_NAME}')] = None, - title: Annotated[str, Query(max_length=1024, description=f'{DOC_HDX_DATASET_TITLE}')] = None, - hdx_provider_stub: Annotated[str, Query(max_length=128, description=f'{DOC_HDX_PROVIDER_STUB}')] = None, - hdx_provider_name: Annotated[str, Query(max_length=512, description=f'{DOC_HDX_PROVIDER_NAME}')] = None, + dataset_hdx_id: Annotated[Optional[str], Query(max_length=36, description=f'{DOC_HDX_DATASET_ID}')] = None, + dataset_hdx_stub: Annotated[Optional[str], Query(max_length=128, description=f'{DOC_HDX_DATASET_NAME}')] = None, + dataset_hdx_title: Annotated[Optional[str], Query(max_length=1024, description=f'{DOC_HDX_DATASET_TITLE}')] = None, + hdx_provider_stub: Annotated[Optional[str], Query(max_length=128, description=f'{DOC_HDX_PROVIDER_STUB}')] = None, + 
+    hdx_provider_name: Annotated[Optional[str], Query(max_length=512, description=f'{DOC_HDX_PROVIDER_NAME}')] = None,
     output_format: OutputFormat = OutputFormat.JSON,
 ):
     """
     Get information about the HDX Datasets that are used as data sources
-    for HAPI. Datasets contain one or more resources, which are the sources of the data found in HAPI.
+    for HDX HAPI. Datasets contain one or more resources, which are the sources of the data found in HDX HAPI.
     """
     result = await get_datasets_srv(
-        pagination_parameters=pagination_parameters,
+        pagination_parameters=common_parameters,
         db=db,
-        hdx_id=hdx_id,
-        hdx_stub=hdx_stub,
-        title=title,
+        dataset_hdx_id=dataset_hdx_id,
+        dataset_hdx_stub=dataset_hdx_stub,
+        dataset_hdx_title=dataset_hdx_title,
         hdx_provider_stub=hdx_provider_stub,
         hdx_provider_name=hdx_provider_name,
     )
@@ -70,20 +78,20 @@

 @router.get(
-    '/api/resource',
-    response_model=List[ResourceResponse],
-    summary='Get information about the sources of the data in HAPI',
+    '/api/metadata/resource',
+    response_model=HapiGenericResponse[ResourceResponse],
+    summary='Get information about the sources of the data in HDX HAPI',
     include_in_schema=False,
 )
 @router.get(
-    '/api/v1/resource',
-    response_model=List[ResourceResponse],
-    summary='Get information about the sources of the data in HAPI',
+    '/api/v1/metadata/resource',
+    response_model=HapiGenericResponse[ResourceResponse],
+    summary='Get information about the sources of the data in HDX HAPI',
 )
 async def get_resources(
-    pagination_parameters: Annotated[dict, Depends(pagination_parameters)],
+    common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
     db: AsyncSession = Depends(get_db),
-    hdx_id: Annotated[str, Query(max_length=36, description=f'{DOC_HDX_RESOURCE_ID}')] = None,
+    resource_hdx_id: Annotated[str, Query(max_length=36, description=f'{DOC_HDX_RESOURCE_ID}')] = None,
     format: Annotated[str, Query(max_length=32, description=f'{DOC_HDX_RESOURCE_FORMAT}')] = None,
     update_date_min: Annotated[
         NaiveDatetime | date,
@@ -94,32 +102,36 @@
         Query(description=f'{DOC_UPDATE_DATE_MAX}', openapi_examples={'2024-12-31': {'value': '2024-12-31'}}),
     ] = None,
     is_hxl: Annotated[bool, Query(description=f'{DOC_HDX_RESOURCE_HXL}')] = None,
-    dataset_hdx_id: Annotated[str, Query(max_length=36, description=f'{DOC_HDX_DATASET_ID} {DOC_SEE_DATASET} ')] = None,
+    dataset_hdx_id: Annotated[
+        str, Query(max_length=36, description=f'{DOC_HDX_DATASET_IN_RESOURCE_ID} {DOC_SEE_DATASET} ')
+    ] = None,
     dataset_hdx_stub: Annotated[
-        str, Query(max_length=128, description=f'{DOC_HDX_DATASET_NAME} {DOC_SEE_DATASET}')
+        str, Query(max_length=128, description=f'{DOC_HDX_DATASET_IN_RESOURCE_NAME} {DOC_SEE_DATASET}')
     ] = None,
-    dataset_title: Annotated[
+    dataset_hdx_title: Annotated[
         str, Query(max_length=1024, description=f'{DOC_HDX_DATASET_TITLE} {DOC_SEE_DATASET}')
     ] = None,
-    dataset_hdx_provider_stub: Annotated[str, Query(max_length=128, description=f'{DOC_HDX_PROVIDER_STUB}')] = None,
+    dataset_hdx_provider_stub: Annotated[
+        str, Query(max_length=128, description=f'{DOC_HDX_PROVIDER_IN_RESOURCE_STUB}')
+    ] = None,
     dataset_hdx_provider_name: Annotated[str, Query(max_length=512, description=f'{DOC_HDX_PROVIDER_NAME}')] = None,
     output_format: OutputFormat = OutputFormat.JSON,
 ):
     """
-    Get information about the resources that are used as data sources for HAPI. Datasets contain one or more resources,
-    which are the sources of the data found in HAPI.
+    Get information about the resources that are used as data sources for HDX HAPI. Datasets contain one or
+    more resources, which are the sources of the data found in HDX HAPI.
     """
     result = await get_resources_srv(
-        pagination_parameters=pagination_parameters,
+        pagination_parameters=common_parameters,
         db=db,
-        hdx_id=hdx_id,
+        resource_hdx_id=resource_hdx_id,
         format=format,
         update_date_min=update_date_min,
         update_date_max=update_date_max,
         is_hxl=is_hxl,
         dataset_hdx_id=dataset_hdx_id,
         dataset_hdx_stub=dataset_hdx_stub,
-        dataset_title=dataset_title,
+        dataset_hdx_title=dataset_hdx_title,
         dataset_hdx_provider_stub=dataset_hdx_provider_stub,
         dataset_hdx_provider_name=dataset_hdx_provider_name,
     )
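Across the whole patch, bare `List[...]` response models are replaced by `HapiGenericResponse[...]`. Its definition in `hdx_hapi/endpoints/models/base.py` is not part of this diff; the sketch below shows the usual Pydantic v2 pattern for such a generic envelope, and the `data` field name is an assumption.

```python
# Plausible shape of the new envelope; the real model lives in
# hdx_hapi/endpoints/models/base.py and the `data` field name is a guess.
from typing import Generic, List, TypeVar

from pydantic import BaseModel

ResponseT = TypeVar('ResponseT')


class HapiGenericResponse(BaseModel, Generic[ResponseT]):
    data: List[ResponseT]


class LocationResponse(BaseModel):  # stand-in for the real response model
    code: str
    name: str


envelope = HapiGenericResponse[LocationResponse](
    data=[{'code': 'AFG', 'name': 'Afghanistan'}]
)
print(envelope.data[0].code)  # rows now sit under a top-level field, not at the root
```

Wrapping the row list in an object is what lets the API add envelope-level fields later without breaking clients that already parse the response.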
diff --git a/hdx_hapi/endpoints/get_humanitarian_needs.py b/hdx_hapi/endpoints/get_humanitarian_needs.py
deleted file mode 100644
index 4e9feb51..00000000
--- a/hdx_hapi/endpoints/get_humanitarian_needs.py
+++ /dev/null
@@ -1,103 +0,0 @@
-from datetime import date
-from typing import List, Annotated
-from fastapi import Depends, Query, APIRouter
-from pydantic import NaiveDatetime
-
-
-from sqlalchemy.ext.asyncio import AsyncSession
-
-from hdx_hapi.config.doc_snippets import (
-    DOC_GENDER_CODE,
-    DOC_AGE_RANGE_CODE,
-    DOC_SECTOR_CODE,
-    DOC_SECTOR_NAME,
-    DOC_HDX_PROVIDER_STUB,
-    DOC_ADMIN1_CODE,
-    DOC_ADMIN2_NAME,
-    DOC_ADMIN2_CODE,
-    DOC_LOCATION_CODE,
-    DOC_LOCATION_NAME,
-    DOC_SEE_ADMIN1,
-    DOC_SEE_LOC,
-    DOC_UPDATE_DATE_MAX,
-    DOC_UPDATE_DATE_MIN,
-    DOC_SEE_ADMIN2,
-)
-
-from hdx_hapi.endpoints.models.humanitarian_needs import HumanitarianNeedsResponse
-from hdx_hapi.endpoints.util.util import AdminLevel, OutputFormat, pagination_parameters
-from hdx_hapi.services.csv_transform_logic import transform_result_to_csv_stream_if_requested
-from hdx_hapi.services.humanitarian_needs_logic import get_humanitarian_needs_srv
-from hdx_hapi.services.sql_alchemy_session import get_db
-
-router = APIRouter(
-    tags=['Humanitarian Needs'],
-)
-
-
-@router.get(
-    '/api/themes/humanitarian_needs',
-    response_model=List[HumanitarianNeedsResponse],
-    summary='Get humanitarian needs data',
-    include_in_schema=False,
-)
-@router.get(
-    '/api/v1/themes/humanitarian_needs',
-    response_model=List[HumanitarianNeedsResponse],
-    summary='Get humanitarian needs data',
-)
-async def get_humanitarian_needs(
-    pagination_parameters: Annotated[dict, Depends(pagination_parameters)],
-    db: AsyncSession = Depends(get_db),
-    gender_code: Annotated[str, Query(max_length=1, description=f'{DOC_GENDER_CODE}')] = None,
-    age_range_code: Annotated[str, Query(max_length=32, description=f'{DOC_AGE_RANGE_CODE}')] = None,
-    disabled_marker: Annotated[bool, Query(description='Disabled marker')] = None,
-    sector_code: Annotated[str, Query(max_length=32, description=f'{DOC_SECTOR_CODE}')] = None,
-    sector_name: Annotated[str, Query(max_length=512, description=f'{DOC_SECTOR_NAME}')] = None,
-    population_group_code: Annotated[str, Query(max_length=32, description='Population group code')] = None,
-    population_status_code: Annotated[str, Query(max_length=32, description='Population status code')] = None,
-    population: Annotated[int, Query(description='Population')] = None,
-    dataset_hdx_provider_stub: Annotated[str, Query(max_length=128, description=f'{DOC_HDX_PROVIDER_STUB}')] = None,
-    resource_update_date_min: Annotated[
-        NaiveDatetime | date,
-        Query(description=f'{DOC_UPDATE_DATE_MIN}', openapi_examples={'2020-01-01': {'value': '2020-01-01'}}),
-    ] = None,
-    resource_update_date_max: Annotated[
-        NaiveDatetime | date,
-        Query(description=f'{DOC_UPDATE_DATE_MAX}', openapi_examples={'2024-12-31': {'value': '2024-12-31'}}),
-    ] = None,
-    location_code: Annotated[str, Query(max_length=128, description=f'{DOC_LOCATION_CODE} {DOC_SEE_LOC}')] = None,
-    location_name: Annotated[str, Query(max_length=512, description=f'{DOC_LOCATION_NAME} {DOC_SEE_LOC}')] = None,
-    admin1_code: Annotated[str, Query(max_length=128, description=f'{DOC_ADMIN1_CODE} {DOC_SEE_ADMIN1}')] = None,
-    # admin1_name: Annotated[str, Query(max_length=512, description=f'{DOC_ADMIN1_NAME} {DOC_SEE_ADMIN1}')] = None,
-    admin2_code: Annotated[str, Query(max_length=128, description=f'{DOC_ADMIN2_CODE} {DOC_SEE_ADMIN2}')] = None,
-    admin2_name: Annotated[str, Query(max_length=512, description=f'{DOC_ADMIN2_NAME} {DOC_SEE_ADMIN2}')] = None,
-    admin_level: Annotated[AdminLevel, Query(description='Filter the response by admin level')] = None,
-    output_format: OutputFormat = OutputFormat.JSON,
-):
-    """
-    Return the list of humanitarian needs data
-    """
-    result = await get_humanitarian_needs_srv(
-        pagination_parameters=pagination_parameters,
-        db=db,
-        gender_code=gender_code,
-        age_range_code=age_range_code,
-        disabled_marker=disabled_marker,
-        sector_code=sector_code,
-        sector_name=sector_name,
-        population_group_code=population_group_code,
-        population_status_code=population_status_code,
-        population=population,
-        dataset_hdx_provider_stub=dataset_hdx_provider_stub,
-        resource_update_date_min=resource_update_date_min,
-        resource_update_date_max=resource_update_date_max,
-        location_code=location_code,
-        location_name=location_name,
-        admin1_code=admin1_code,
-        # admin1_name=admin1_name,
-        admin2_code=admin2_code,
-        admin2_name=admin2_name,
-        admin_level=admin_level,
-    )
-    return transform_result_to_csv_stream_if_requested(result, output_format, HumanitarianNeedsResponse)
diff --git a/hdx_hapi/endpoints/get_humanitarian_response.py b/hdx_hapi/endpoints/get_humanitarian_response.py
index fb19b82f..841e0ef6 100644
--- a/hdx_hapi/endpoints/get_humanitarian_response.py
+++ b/hdx_hapi/endpoints/get_humanitarian_response.py
@@ -1,4 +1,4 @@
-from typing import List, Annotated
+from typing import Annotated
 from fastapi import Depends, Query, APIRouter
@@ -13,9 +13,15 @@
     DOC_SEE_ORG_TYPE,
 )

+from hdx_hapi.endpoints.models.base import HapiGenericResponse
 from hdx_hapi.endpoints.models.humanitarian_response import OrgResponse, OrgTypeResponse, SectorResponse
-from hdx_hapi.endpoints.util.util import OutputFormat, pagination_parameters
+from hdx_hapi.endpoints.util.util import (
+    CommonEndpointParams,
+    OutputFormat,
+    common_endpoint_parameters,
+)
 from hdx_hapi.services.csv_transform_logic import transform_result_to_csv_stream_if_requested
+
 from hdx_hapi.services.org_logic import get_orgs_srv
 from hdx_hapi.services.org_type_logic import get_org_types_srv
 from hdx_hapi.services.sector_logic import get_sectors_srv
@@ -23,23 +29,23 @@

 router = APIRouter(
-    tags=['Humanitarian Organizations and Sectors'],
+    tags=['Metadata'],
 )


 @router.get(
-    '/api/org',
-    response_model=List[OrgResponse],
-    summary='Get the list of organizations represented in the data available in HAPI',
+    '/api/metadata/org',
+    response_model=HapiGenericResponse[OrgResponse],
+    summary='Get the list of organizations represented in the data available in HDX HAPI',
     include_in_schema=False,
 )
 @router.get(
-    '/api/v1/org',
-    response_model=List[OrgResponse],
-    summary='Get the list of organizations represented in the data available in HAPI',
+    '/api/v1/metadata/org',
+    response_model=HapiGenericResponse[OrgResponse],
+    summary='Get the list of organizations represented in the data available in HDX HAPI',
 )
 async def get_orgs(
-    pagination_parameters: Annotated[dict, Depends(pagination_parameters)],
+    common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
     db: AsyncSession = Depends(get_db),
     acronym: Annotated[
         str, Query(max_length=32, description=f'{DOC_ORG_ACRONYM}', openapi_examples={'unhcr': {'value': 'unhcr'}})
@@ -64,7 +70,7 @@
 ):
     """ """
     result = await get_orgs_srv(
-        pagination_parameters=pagination_parameters,
+        pagination_parameters=common_parameters,
         db=db,
         acronym=acronym,
         name=name,
@@ -75,18 +81,18 @@

 @router.get(
-    '/api/org_type',
-    response_model=List[OrgTypeResponse],
-    summary='Get information about how organizations are classified in HAPI',
+    '/api/metadata/org_type',
+    response_model=HapiGenericResponse[OrgTypeResponse],
+    summary='Get information about how organizations are classified in HDX HAPI',
     include_in_schema=False,
 )
 @router.get(
-    '/api/v1/org_type',
-    response_model=List[OrgTypeResponse],
-    summary='Get information about how organizations are classified in HAPI',
+    '/api/v1/metadata/org-type',
+    response_model=HapiGenericResponse[OrgTypeResponse],
+    summary='Get information about how organizations are classified in HDX HAPI',
 )
 async def get_org_types(
-    pagination_parameters: Annotated[dict, Depends(pagination_parameters)],
+    common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
     db: AsyncSession = Depends(get_db),
     code: Annotated[
         str, Query(max_length=32, description=f'{DOC_ORG_TYPE_CODE}', openapi_examples={'433': {'value': '433'}})
@@ -99,28 +105,26 @@
     ] = None,
     output_format: OutputFormat = OutputFormat.JSON,
 ):
-    """There is no agreed standard for the classification of organizations. The codes and descriptions used in HAPI are
-    based on this dataset.
+    """There is no agreed standard for the classification of organizations. The codes and descriptions used in HDX HAPI
+    are based on this dataset.
     """
-    result = await get_org_types_srv(
-        pagination_parameters=pagination_parameters, db=db, code=code, description=description
-    )
+    result = await get_org_types_srv(pagination_parameters=common_parameters, db=db, code=code, description=description)
     return transform_result_to_csv_stream_if_requested(result, output_format, OrgTypeResponse)


 @router.get(
-    '/api/sector',
-    response_model=List[SectorResponse],
+    '/api/metadata/sector',
+    response_model=HapiGenericResponse[SectorResponse],
     summary='Get information about how humanitarian response activities are classified',
     include_in_schema=False,
 )
 @router.get(
-    '/api/v1/sector',
-    response_model=List[SectorResponse],
+    '/api/v1/metadata/sector',
+    response_model=HapiGenericResponse[SectorResponse],
     summary='Get information about how humanitarian response activities are classified',
 )
 async def get_sectors(
-    pagination_parameters: Annotated[dict, Depends(pagination_parameters)],
+    common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
     db: AsyncSession = Depends(get_db),
     code: Annotated[
         str, Query(max_length=32, description=f'{DOC_SECTOR_CODE}', openapi_examples={'hea': {'value': 'hea'}})
@@ -130,11 +134,11 @@
     ] = None,
     output_format: OutputFormat = OutputFormat.JSON,
 ):
-    """There is no consistent standard for the humanitarian sectors. The codes and descriptions used in HAPI are based
-    on this dataset.
+    """There is no consistent standard for the humanitarian sectors. The codes and descriptions used in HDX HAPI are
+    based on this dataset.
     """
     result = await get_sectors_srv(
-        pagination_parameters=pagination_parameters,
+        pagination_parameters=common_parameters,
         db=db,
         code=code,
         name=name,
diff --git a/hdx_hapi/endpoints/get_national_risk.py b/hdx_hapi/endpoints/get_national_risk.py
index 1eaa6fc4..de22501d 100644
--- a/hdx_hapi/endpoints/get_national_risk.py
+++ b/hdx_hapi/endpoints/get_national_risk.py
@@ -1,77 +1,89 @@
-from datetime import date
-from typing import List, Annotated
+from typing import Annotated, Optional
 from fastapi import Depends, Query, APIRouter
-from pydantic import NaiveDatetime
+from hapi_schema.utils.enums import RiskClass

 from sqlalchemy.ext.asyncio import AsyncSession

 from hdx_hapi.config.doc_snippets import (
-    DOC_HDX_PROVIDER_STUB,
     DOC_LOCATION_CODE,
     DOC_LOCATION_NAME,
     DOC_SEE_LOC,
-    DOC_UPDATE_DATE_MAX,
-    DOC_UPDATE_DATE_MIN,
 )

+from hdx_hapi.endpoints.models.base import HapiGenericResponse
 from hdx_hapi.endpoints.models.national_risk import NationalRiskResponse
-from hdx_hapi.endpoints.util.util import OutputFormat, pagination_parameters
+from hdx_hapi.endpoints.util.util import (
+    CommonEndpointParams,
+    OutputFormat,
+    # ReferencePeriodParameters,
+    common_endpoint_parameters,
+    # reference_period_parameters,
+)
 from hdx_hapi.services.csv_transform_logic import transform_result_to_csv_stream_if_requested
 from hdx_hapi.services.national_risk_logic import get_national_risks_srv
 from hdx_hapi.services.sql_alchemy_session import get_db

 router = APIRouter(
-    tags=['National Risk'],
+    tags=['Coordination & Context'],
 )


 @router.get(
-    '/api/themes/national_risk',
-    response_model=List[NationalRiskResponse],
+    '/api/coordination-context/national-risk',
+    response_model=HapiGenericResponse[NationalRiskResponse],
     summary='Get national risk data',
     include_in_schema=False,
 )
-@router.get('/api/v1/themes/national_risk', response_model=List[NationalRiskResponse], summary='Get national risk data')
+@router.get(
+    '/api/v1/coordination-context/national-risk',
+    response_model=HapiGenericResponse[NationalRiskResponse],
+    summary='Get national risk data',
+)
 async def get_national_risks(
-    pagination_parameters: Annotated[dict, Depends(pagination_parameters)],
+    # ref_period_parameters: Annotated[ReferencePeriodParameters, Depends(reference_period_parameters)],
+    common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
     db: AsyncSession = Depends(get_db),
-    risk_class: Annotated[int, Query(description='Risk class')] = None,
-    global_rank: Annotated[int, Query(description='Global rank')] = None,
-    overall_risk: Annotated[float, Query(description='Overall risk')] = None,
-    hazard_exposure_risk: Annotated[float, Query(description='Hazard exposure risk')] = None,
-    vulnerability_risk: Annotated[float, Query(description='Vulnerability risk')] = None,
-    coping_capacity_risk: Annotated[float, Query(description='Coping capacity risk')] = None,
-    dataset_hdx_provider_stub: Annotated[str, Query(max_length=128, description=f'{DOC_HDX_PROVIDER_STUB}')] = None,
-    resource_update_date_min: Annotated[
-        NaiveDatetime | date,
-        Query(description=f'{DOC_UPDATE_DATE_MIN}', openapi_examples={'2020-01-01': {'value': '2020-01-01'}}),
+    risk_class: Annotated[Optional[RiskClass], Query(description='Risk class')] = None,
+    global_rank_min: Annotated[Optional[int], Query(description='Global rank, lower bound')] = None,
+    global_rank_max: Annotated[Optional[int], Query(description='Global rank, upper bound')] = None,
+    overall_risk_min: Annotated[Optional[float], Query(description='Overall risk, lower bound')] = None,
+    overall_risk_max: Annotated[Optional[float], Query(description='Overall risk, upper bound')] = None,
+    hazard_exposure_risk_min: Annotated[Optional[float], Query(description='Hazard exposure risk, lower bound')] = None,
+    hazard_exposure_risk_max: Annotated[Optional[float], Query(description='Hazard exposure risk, upper bound')] = None,
+    vulnerability_risk_min: Annotated[Optional[float], Query(description='Vulnerability risk, lower bound')] = None,
+    vulnerability_risk_max: Annotated[Optional[float], Query(description='Vulnerability risk, upper bound')] = None,
+    coping_capacity_risk_min: Annotated[Optional[float], Query(description='Coping capacity risk, lower bound')] = None,
+    coping_capacity_risk_max: Annotated[Optional[float], Query(description='Coping capacity risk, upper bound')] = None,
+    location_code: Annotated[
+        Optional[str], Query(max_length=128, description=f'{DOC_LOCATION_CODE} {DOC_SEE_LOC}')
     ] = None,
-    resource_update_date_max: Annotated[
-        NaiveDatetime | date,
-        Query(description=f'{DOC_UPDATE_DATE_MAX}', openapi_examples={'2024-12-31': {'value': '2024-12-31'}}),
+    location_name: Annotated[
+        Optional[str], Query(max_length=512, description=f'{DOC_LOCATION_NAME} {DOC_SEE_LOC}')
     ] = None,
-    # sector_name: Annotated[str, Query(max_length=512, description=f'{DOC_SECTOR_NAME}')] = None,
-    location_code: Annotated[str, Query(max_length=128, description=f'{DOC_LOCATION_CODE} {DOC_SEE_LOC}')] = None,
-    location_name: Annotated[str, Query(max_length=512, description=f'{DOC_LOCATION_NAME} {DOC_SEE_LOC}')] = None,
     output_format: OutputFormat = OutputFormat.JSON,
 ):
     """
-    Return the list of national risks
+    European Commission national risk data from the INFORM-risk framework.
+    See the more detailed technical HDX HAPI documentation,
+    and the original INFORM-risk source website.
""" + ref_period_parameters = None result = await get_national_risks_srv( - pagination_parameters=pagination_parameters, + pagination_parameters=common_parameters, + ref_period_parameters=ref_period_parameters, db=db, risk_class=risk_class, - global_rank=global_rank, - overall_risk=overall_risk, - hazard_exposure_risk=hazard_exposure_risk, - vulnerability_risk=vulnerability_risk, - coping_capacity_risk=coping_capacity_risk, - dataset_hdx_provider_stub=dataset_hdx_provider_stub, - resource_update_date_min=resource_update_date_min, - resource_update_date_max=resource_update_date_max, - # sector_name=sector_name, + global_rank_min=global_rank_min, + global_rank_max=global_rank_max, + overall_risk_min=overall_risk_min, + overall_risk_max=overall_risk_max, + hazard_exposure_risk_min=hazard_exposure_risk_min, + hazard_exposure_risk_max=hazard_exposure_risk_max, + vulnerability_risk_min=vulnerability_risk_min, + vulnerability_risk_max=vulnerability_risk_max, + coping_capacity_risk_min=coping_capacity_risk_min, + coping_capacity_risk_max=coping_capacity_risk_max, location_code=location_code, location_name=location_name, ) diff --git a/hdx_hapi/endpoints/get_operational_presence.py b/hdx_hapi/endpoints/get_operational_presence.py index cf2f286d..d966eaf9 100644 --- a/hdx_hapi/endpoints/get_operational_presence.py +++ b/hdx_hapi/endpoints/get_operational_presence.py @@ -1,62 +1,61 @@ -from datetime import date -from typing import List, Annotated +from typing import Annotated from fastapi import Depends, Query, APIRouter -from pydantic import NaiveDatetime from sqlalchemy.ext.asyncio import AsyncSession from hdx_hapi.config.doc_snippets import ( + DOC_ADMIN1_REF, DOC_ADMIN1_CODE, DOC_ADMIN1_NAME, + DOC_ADMIN2_REF, DOC_ADMIN2_CODE, DOC_ADMIN2_NAME, + DOC_LOCATION_REF, DOC_LOCATION_CODE, DOC_LOCATION_NAME, DOC_SEE_ADMIN1, DOC_SEE_ADMIN2, DOC_SEE_LOC, + # DOC_HAPI_UPDATED_DATE_MIN, + # DOC_HAPI_UPDATED_DATE_MAX, + # DOC_HAPI_REPLACED_DATE_MIN, + # DOC_HAPI_REPLACED_DATE_MAX, ) +from hdx_hapi.endpoints.models.base import HapiGenericResponse from hdx_hapi.endpoints.models.operational_presence import OperationalPresenceResponse -from hdx_hapi.endpoints.util.util import AdminLevel, OutputFormat, pagination_parameters +from hdx_hapi.endpoints.util.util import ( + AdminLevel, + CommonEndpointParams, + OutputFormat, + # ReferencePeriodParameters, + common_endpoint_parameters, + # reference_period_parameters, +) from hdx_hapi.services.csv_transform_logic import transform_result_to_csv_stream_if_requested from hdx_hapi.services.operational_presence_logic import get_operational_presences_srv from hdx_hapi.services.sql_alchemy_session import get_db router = APIRouter( - tags=['3W Operational Presence'], + tags=['Coordination & Context'], ) -SUMMARY_TEXT = ( - 'Get the list of organizations present and in which humanitarian sectors they are working. ' - "There are two versions of this endpoint to support the uppercase and lowercase 'w'" -) +SUMMARY_TEXT = 'Get the list of organizations present and in which humanitarian sectors they are working.' 
@router.get(
-    '/api/themes/3w',
-    response_model=List[OperationalPresenceResponse],
+    '/api/coordination-context/operational-presence',
+    response_model=HapiGenericResponse[OperationalPresenceResponse],
     summary=SUMMARY_TEXT,
     include_in_schema=False,
 )
 @router.get(
-    '/api/themes/3W',
-    response_model=List[OperationalPresenceResponse],
+    '/api/v1/coordination-context/operational-presence',
+    response_model=HapiGenericResponse[OperationalPresenceResponse],
     summary=SUMMARY_TEXT,
-    include_in_schema=False,
-)
-@router.get(
-    '/api/v1/themes/3w',
-    response_model=List[OperationalPresenceResponse],
-    summary=SUMMARY_TEXT,
-)
-@router.get(
-    '/api/v1/themes/3W',
-    response_model=List[OperationalPresenceResponse],
-    summary=SUMMARY_TEXT,
-    include_in_schema=False,
 )
 async def get_operational_presences(
-    pagination_parameters: Annotated[dict, Depends(pagination_parameters)],
+    # ref_period_parameters: Annotated[ReferencePeriodParameters, Depends(reference_period_parameters)],
+    common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
     db: AsyncSession = Depends(get_db),
     sector_code: Annotated[
         str,
@@ -65,7 +64,7 @@ async def get_operational_presences(
         description=(
             'Filter the response by sector codes, which describe the humanitarian sector '
             'to which the operational presence applies. '
-            'See the sector endpoint for details'
         ),
     ),
@@ -77,7 +76,7 @@ async def get_operational_presences(
         description=(
             'Filter the response by sector names, '
             'which describe the humanitarian sector to which the operational presence applies. '
-            'See the sector endpoint for details'
         ),
     ),
@@ -89,7 +88,7 @@ async def get_operational_presences(
         description=(
             'Filter the response by the acronym of the organization '
             'to which the operational presence applies. '
-            'See the org endpoint for details'
         ),
     ),
@@ -101,51 +100,70 @@ async def get_operational_presences(
         description=(
             'Filter the response by the name of the organization '
             'to which the operational presence applies. '
-            'See the org endpoint for details'
         ),
     ),
     ] = None,
+    location_ref: Annotated[int, Query(description=f'{DOC_LOCATION_REF}')] = None,
     location_code: Annotated[str, Query(max_length=128, description=f'{DOC_LOCATION_CODE} {DOC_SEE_LOC}')] = None,
     location_name: Annotated[str, Query(max_length=512, description=f'{DOC_LOCATION_NAME} {DOC_SEE_LOC}')] = None,
+    admin1_ref: Annotated[int, Query(description=f'{DOC_ADMIN1_REF}')] = None,
     admin1_code: Annotated[str, Query(max_length=128, description=f'{DOC_ADMIN1_CODE} {DOC_SEE_ADMIN1}')] = None,
     admin1_name: Annotated[str, Query(max_length=512, description=f'{DOC_ADMIN1_NAME} {DOC_SEE_ADMIN1}')] = None,
     # admin1_is_unspecified: Annotated[bool, Query(description='Location Adm1 is not specified')] = None,
+    admin2_ref: Annotated[int, Query(description=f'{DOC_ADMIN2_REF}')] = None,
     admin2_code: Annotated[str, Query(max_length=128, description=f'{DOC_ADMIN2_CODE} {DOC_SEE_ADMIN2}')] = None,
     admin2_name: Annotated[str, Query(max_length=512, description=f'{DOC_ADMIN2_NAME} {DOC_SEE_ADMIN2}')] = None,
     admin_level: Annotated[AdminLevel, Query(description='Filter the response by admin level')] = None,
     # admin2_is_unspecified: Annotated[bool, Query(description='Location Adm2 is not specified')] = None,
-    resource_update_date_min: Annotated[
-        NaiveDatetime | date,
-        Query(
-            description=(
-                'Filter the response to data updated on or after this date. 
' - 'For example 2020-01-01 or 2020-01-01T00:00:00' - ), - openapi_examples={'2020-01-01': {'value': '2020-01-01'}}, - ), - ] = None, - resource_update_date_max: Annotated[ - NaiveDatetime | date, - Query( - description=( - 'Filter the response to data updated on or before this date. ' - 'For example 2024-12-31 or 2024-12-31T23:59:59' - ), - openapi_examples={'2024-12-31': {'value': '2024-12-31'}}, - ), - ] = None, - dataset_hdx_provider_stub: Annotated[ - str, - Query( - max_length=128, - description=( - 'Filter the query by the organizations contributing the source data to HDX. ' - 'If you want to filter by the organization mentioned in the operational presence record, ' - 'see the org_name and org_acronym parameters below.' - ), - ), - ] = None, + # resource_update_date_min: Annotated[ + # NaiveDatetime | date, + # Query( + # description=( + # 'Filter the response to data updated on or after this date. ' + # 'For example 2020-01-01 or 2020-01-01T00:00:00' + # ), + # openapi_examples={'2020-01-01': {'value': '2020-01-01'}}, + # ), + # ] = None, + # resource_update_date_max: Annotated[ + # NaiveDatetime | date, + # Query( + # description=( + # 'Filter the response to data updated on or before this date. ' + # 'For example 2024-12-31 or 2024-12-31T23:59:59' + # ), + # openapi_examples={'2024-12-31': {'value': '2024-12-31'}}, + # ), + # ] = None, + # hapi_updated_date_min: Annotated[ + # NaiveDatetime | date, + # Query(description=f'{DOC_HAPI_UPDATED_DATE_MIN}'), + # ] = None, + # hapi_updated_date_max: Annotated[ + # NaiveDatetime | date, + # Query(description=f'{DOC_HAPI_UPDATED_DATE_MAX}'), + # ] = None, + # hapi_replaced_date_min: Annotated[ + # NaiveDatetime | date, + # Query(description=f'{DOC_HAPI_REPLACED_DATE_MIN}'), + # ] = None, + # hapi_replaced_date_max: Annotated[ + # NaiveDatetime | date, + # Query(description=f'{DOC_HAPI_REPLACED_DATE_MAX}'), + # ] = None, + # dataset_hdx_provider_stub: Annotated[ + # str, + # Query( + # max_length=128, + # description=( + # 'Filter the query by the organizations contributing the source data to HDX. ' + # 'If you want to filter by the organization mentioned in the operational presence record, ' + # 'see the org_name and org_acronym parameters below.' + # ), + # ), + # ] = None, # org_ref: Annotated[int, Query(ge=1, description='Organization reference')] = None, # dataset_hdx_id: Annotated[str, Query(max_length=36, description='HDX Dataset ID')] = None, # dataset_hdx_stub: Annotated[str, Query(max_length=128, description='HDX Dataset Name')] = None, @@ -161,15 +179,23 @@ async def get_operational_presences( """ UNOCHA's 3W (Who is doing What Where) Operational Presence data provides information about which organizations are working in different locations affected by a - crisis. Learn more about 3W + crisis. + See the more detailed technical HDX HAPI documentation, + and the original UNOCHA 3W source website. 
""" + ref_period_parameters = None result = await get_operational_presences_srv( - pagination_parameters=pagination_parameters, + pagination_parameters=common_parameters, + ref_period_parameters=ref_period_parameters, db=db, sector_code=sector_code, - dataset_hdx_provider_stub=dataset_hdx_provider_stub, - resource_update_date_min=resource_update_date_min, - resource_update_date_max=resource_update_date_max, + # dataset_hdx_provider_stub=dataset_hdx_provider_stub, + # resource_update_date_min=resource_update_date_min, + # resource_update_date_max=resource_update_date_max, + # hapi_updated_date_min=hapi_updated_date_min, + # hapi_updated_date_max=hapi_updated_date_max, + # hapi_replaced_date_min=hapi_replaced_date_min, + # hapi_replaced_date_max=hapi_replaced_date_max, org_acronym=org_acronym, org_name=org_name, sector_name=sector_name, @@ -177,9 +203,12 @@ async def get_operational_presences( location_name=location_name, admin1_code=admin1_code, admin1_name=admin1_name, + location_ref=location_ref, # admin1_is_unspecified=admin1_is_unspecified, + admin2_ref=admin2_ref, admin2_code=admin2_code, admin2_name=admin2_name, + admin1_ref=admin1_ref, # admin2_is_unspecified=admin2_is_unspecified # dataset_hdx_id=dataset_hdx_id, # dataset_hdx_stub=dataset_hdx_stub, diff --git a/hdx_hapi/endpoints/get_population.py b/hdx_hapi/endpoints/get_population.py index 4f2ce3ec..4bb5d316 100644 --- a/hdx_hapi/endpoints/get_population.py +++ b/hdx_hapi/endpoints/get_population.py @@ -1,85 +1,145 @@ -from datetime import date -from typing import List, Annotated +from typing import Annotated, Optional from fastapi import Depends, Query, APIRouter -from pydantic import NaiveDatetime from sqlalchemy.ext.asyncio import AsyncSession +from hapi_schema.utils.enums import Gender from hdx_hapi.config.doc_snippets import ( + DOC_LOCATION_REF, DOC_LOCATION_CODE, DOC_LOCATION_NAME, DOC_SEE_LOC, - DOC_UPDATE_DATE_MAX, - DOC_UPDATE_DATE_MIN, + DOC_ADMIN1_REF, + DOC_ADMIN1_CODE, + DOC_ADMIN1_NAME, + DOC_ADMIN2_REF, + DOC_ADMIN2_CODE, + DOC_ADMIN2_NAME, + DOC_SEE_ADMIN1, + DOC_SEE_ADMIN2, + DOC_GENDER, + DOC_AGE_RANGE, ) +from hdx_hapi.endpoints.models.base import HapiGenericResponse from hdx_hapi.endpoints.models.population import PopulationResponse -from hdx_hapi.endpoints.util.util import AdminLevel, OutputFormat, pagination_parameters +from hdx_hapi.endpoints.models.poverty_rate import PovertyRateResponse +from hdx_hapi.endpoints.util.util import ( + CommonEndpointParams, + OutputFormat, + # ReferencePeriodParameters, + common_endpoint_parameters, + # reference_period_parameters, + AdminLevel, +) from hdx_hapi.services.csv_transform_logic import transform_result_to_csv_stream_if_requested from hdx_hapi.services.population_logic import get_populations_srv +from hdx_hapi.services.poverty_rate_logic import get_poverty_rates_srv from hdx_hapi.services.sql_alchemy_session import get_db router = APIRouter( - tags=['Baseline Population'], + tags=['Population & Socio-Economy'], ) @router.get( - '/api/themes/population', - response_model=List[PopulationResponse], + '/api/population-social/population', + response_model=HapiGenericResponse[PopulationResponse], summary='Get baseline population data', include_in_schema=False, ) @router.get( - '/api/v1/themes/population', response_model=List[PopulationResponse], summary='Get baseline population data' + '/api/v1/population-social/population', + response_model=HapiGenericResponse[PopulationResponse], + summary='Get baseline population data', ) async def get_populations( - 
pagination_parameters: Annotated[dict, Depends(pagination_parameters)], + common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)], + # ref_period_parameters: Annotated[ReferencePeriodParameters, Depends(reference_period_parameters)], db: AsyncSession = Depends(get_db), - gender_code: Annotated[str, Query(max_length=1, description='Gender code')] = None, - age_range_code: Annotated[str, Query(max_length=32, description='Age range code')] = None, - population: Annotated[int, Query(description='Population')] = None, - dataset_hdx_provider_stub: Annotated[str, Query(max_length=128, description='Organization(provider) code')] = None, - resource_update_date_min: Annotated[ - NaiveDatetime | date, - Query(description=f'{DOC_UPDATE_DATE_MIN}', openapi_examples={'2020-01-01': {'value': '2020-01-01'}}), - ] = None, - resource_update_date_max: Annotated[ - NaiveDatetime | date, - Query(description=f'{DOC_UPDATE_DATE_MAX}', openapi_examples={'2024-12-31': {'value': '2024-12-31'}}), - ] = None, + gender: Annotated[Optional[Gender], Query(max_length=3, description=f'{DOC_GENDER}')] = None, + age_range: Annotated[Optional[str], Query(max_length=32, description=f'{DOC_AGE_RANGE}')] = None, + population_min: Annotated[int, Query(description='Population, minimum value for filter')] = None, + population_max: Annotated[int, Query(description='Population, maximum value for filter')] = None, + location_ref: Annotated[int, Query(description=f'{DOC_LOCATION_REF}')] = None, location_code: Annotated[str, Query(max_length=128, description=f'{DOC_LOCATION_CODE} {DOC_SEE_LOC}')] = None, location_name: Annotated[str, Query(max_length=512, description=f'{DOC_LOCATION_NAME} {DOC_SEE_LOC}')] = None, - admin1_name: Annotated[str, Query(max_length=512, description='Admin1 name')] = None, - admin1_code: Annotated[str, Query(max_length=128, description='Admin1 code')] = None, - # admin1_is_unspecified: Annotated[bool, Query(description='Is admin1 specified or not')] = None, - admin2_name: Annotated[str, Query(max_length=512, description='Admin2 name')] = None, - admin2_code: Annotated[str, Query(max_length=128, description='Admin2 code')] = None, + admin1_ref: Annotated[int, Query(description=f'{DOC_ADMIN1_REF}')] = None, + admin1_code: Annotated[str, Query(max_length=128, description=f'{DOC_ADMIN1_CODE} {DOC_SEE_ADMIN1}')] = None, + admin1_name: Annotated[str, Query(max_length=512, description=f'{DOC_ADMIN1_NAME} {DOC_SEE_ADMIN1}')] = None, + # admin1_is_unspecified: Annotated[bool, Query(description='Location Adm1 is not specified')] = None, + admin2_ref: Annotated[int, Query(description=f'{DOC_ADMIN2_REF}')] = None, + admin2_code: Annotated[str, Query(max_length=128, description=f'{DOC_ADMIN2_CODE} {DOC_SEE_ADMIN2}')] = None, + admin2_name: Annotated[str, Query(max_length=512, description=f'{DOC_ADMIN2_NAME} {DOC_SEE_ADMIN2}')] = None, admin_level: Annotated[AdminLevel, Query(description='Filter the response by admin level')] = None, # admin2_is_unspecified: Annotated[bool, Query(description='Is admin2 specified or not')] = None, output_format: OutputFormat = OutputFormat.JSON, ): """ - Return the list of populations + Baseline population data sourced and maintained by UNFPA (UN Population Fund). + See the more detailed technical HDX HAPI documentation, + and the UNFPA on HDX. 
""" + ref_period_parameters = None result = await get_populations_srv( - pagination_parameters=pagination_parameters, + pagination_parameters=common_parameters, + ref_period_parameters=ref_period_parameters, db=db, - gender_code=gender_code, - age_range_code=age_range_code, - population=population, - dataset_hdx_provider_stub=dataset_hdx_provider_stub, - resource_update_date_min=resource_update_date_min, - resource_update_date_max=resource_update_date_max, + gender=gender, + age_range=age_range, + population_min=population_min, + population_max=population_max, + admin1_ref=admin1_ref, + location_ref=location_ref, location_code=location_code, location_name=location_name, admin1_name=admin1_name, admin1_code=admin1_code, - # admin1_is_unspecified=admin1_is_unspecified, - admin2_code=admin2_code, + admin2_ref=admin2_ref, admin2_name=admin2_name, + admin2_code=admin2_code, admin_level=admin_level, - # admin2_is_unspecified=admin2_is_unspecified, ) return transform_result_to_csv_stream_if_requested(result, output_format, PopulationResponse) + + +@router.get( + '/api/population-social/poverty-rate', + response_model=HapiGenericResponse[PovertyRateResponse], + summary='Get poverty rate data', + include_in_schema=False, +) +@router.get( + '/api/v1/population-social/poverty-rate', + response_model=HapiGenericResponse[PovertyRateResponse], + summary='Get poverty rate data', +) +async def get_poverty_rates( + common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)], + # ref_period_parameters: Annotated[ReferencePeriodParameters, Depends(reference_period_parameters)], + db: AsyncSession = Depends(get_db), + mpi_min: Annotated[Optional[float], Query(description='Multidimensional Poverty Index (MPI), lower bound')] = None, + mpi_max: Annotated[Optional[float], Query(description='Multidimensional Poverty Index (MPI), upper bound')] = None, + location_code: Annotated[str, Query(max_length=128, description=f'{DOC_LOCATION_CODE} {DOC_SEE_LOC}')] = None, + location_name: Annotated[str, Query(max_length=512, description=f'{DOC_LOCATION_NAME} {DOC_SEE_LOC}')] = None, + admin1_name: Annotated[str, Query(max_length=512, description=f'{DOC_ADMIN1_NAME} {DOC_SEE_ADMIN1}')] = None, + output_format: OutputFormat = OutputFormat.JSON, +): + """ + Poverty rate data from the Oxford Department of International Development. + See the more detailed technical HDX HAPI documentation, + and the Oxford Department of International Development website. 
+ """ + ref_period_parameters = None + result = await get_poverty_rates_srv( + pagination_parameters=common_parameters, + ref_period_parameters=ref_period_parameters, + db=db, + mpi_min=mpi_min, + mpi_max=mpi_max, + location_code=location_code, + location_name=location_name, + admin1_name=admin1_name, + ) + return transform_result_to_csv_stream_if_requested(result, output_format, PovertyRateResponse) diff --git a/hdx_hapi/endpoints/get_population_profile.py b/hdx_hapi/endpoints/get_population_profile.py deleted file mode 100644 index 97d0e2c9..00000000 --- a/hdx_hapi/endpoints/get_population_profile.py +++ /dev/null @@ -1,72 +0,0 @@ -from typing import List, Annotated -from fastapi import Depends, Query, APIRouter - - -from sqlalchemy.ext.asyncio import AsyncSession - -from hdx_hapi.endpoints.models.population_profile import PopulationGroupResponse, PopulationStatusResponse -from hdx_hapi.endpoints.util.util import OutputFormat, pagination_parameters -from hdx_hapi.services.csv_transform_logic import transform_result_to_csv_stream_if_requested -from hdx_hapi.services.population_group_logic import get_population_groups_srv -from hdx_hapi.services.population_status_logic import get_population_statuses_srv -from hdx_hapi.services.sql_alchemy_session import get_db - -router = APIRouter( - tags=['Population Groups and Statuses'], -) - - -@router.get( - '/api/population_group', - response_model=List[PopulationGroupResponse], - summary='Get population groups data', - include_in_schema=False, -) -@router.get( - '/api/v1/population_group', response_model=List[PopulationGroupResponse], summary='Get population groups data' -) -async def get_population_groups( - pagination_parameters: Annotated[dict, Depends(pagination_parameters)], - db: AsyncSession = Depends(get_db), - code: Annotated[str, Query(max_length=32, description='Population group code')] = None, - description: Annotated[str, Query(max_length=512, description='Population group description')] = None, - output_format: OutputFormat = OutputFormat.JSON, -): - """ - Return the list of population groups - """ - result = await get_population_groups_srv( - pagination_parameters=pagination_parameters, - db=db, - code=code, - description=description, - ) - return transform_result_to_csv_stream_if_requested(result, output_format, PopulationGroupResponse) - - -@router.get( - '/api/population_status', - response_model=List[PopulationStatusResponse], - summary='Get population statuses data', - include_in_schema=False, -) -@router.get( - '/api/v1/population_status', response_model=List[PopulationStatusResponse], summary='Get population statuses data' -) -async def get_population_statuses( - pagination_parameters: Annotated[dict, Depends(pagination_parameters)], - db: AsyncSession = Depends(get_db), - code: Annotated[str, Query(max_length=32, description='Population status code')] = None, - description: Annotated[str, Query(max_length=512, description='Population status description')] = None, - output_format: OutputFormat = OutputFormat.JSON, -): - """ - Return the list of population statuses - """ - result = await get_population_statuses_srv( - pagination_parameters=pagination_parameters, - db=db, - code=code, - description=description, - ) - return transform_result_to_csv_stream_if_requested(result, output_format, PopulationStatusResponse) diff --git a/hdx_hapi/endpoints/get_version.py b/hdx_hapi/endpoints/get_version.py new file mode 100644 index 00000000..c5906da1 --- /dev/null +++ b/hdx_hapi/endpoints/get_version.py @@ -0,0 +1,29 @@ +from fastapi 
import APIRouter + +from hdx_hapi.endpoints.models.version import VersionResponse +from hdx_hapi.endpoints.util import version as hapi_version + +router = APIRouter( + tags=['Util'], +) + +SUMMARY_TEXT = 'Display the API and SQL Alchemy versions' + + +@router.get( + '/api/util/version', + response_model=VersionResponse, + summary=SUMMARY_TEXT, + include_in_schema=False, +) +@router.get( + '/api/v1/util/version', + response_model=VersionResponse, + summary=SUMMARY_TEXT, +) +async def get_version(): + result = { + 'api_version': hapi_version.api_version, + 'hapi_sqlalchemy_schema_version': hapi_version.hapi_sqlalchemy_schema_version, + } + return result diff --git a/hdx_hapi/endpoints/get_wfp_commodity.py b/hdx_hapi/endpoints/get_wfp_commodity.py new file mode 100644 index 00000000..12abd2fd --- /dev/null +++ b/hdx_hapi/endpoints/get_wfp_commodity.py @@ -0,0 +1,46 @@ +from typing import Annotated, Optional +from fastapi import APIRouter, Depends, Query +from hapi_schema.utils.enums import CommodityCategory +from sqlalchemy.ext.asyncio import AsyncSession + +from hdx_hapi.endpoints.models.base import HapiGenericResponse +from hdx_hapi.endpoints.models.wfp_commodity import WfpCommodityResponse +from hdx_hapi.endpoints.util.util import CommonEndpointParams, OutputFormat, common_endpoint_parameters +from hdx_hapi.services.csv_transform_logic import transform_result_to_csv_stream_if_requested +from hdx_hapi.services.sql_alchemy_session import get_db +from hdx_hapi.services.wfp_commodity_logic import get_wfp_commodities_srv + + +router = APIRouter( + tags=['Metadata'], +) + +SUMMARY = 'Get the list of WFP commodities' + + +@router.get( + '/api/metadata/wfp-commodity', + response_model=HapiGenericResponse[WfpCommodityResponse], + summary=SUMMARY, + include_in_schema=False, +) +@router.get( + '/api/v1/metadata/wfp-commodity', + response_model=HapiGenericResponse[WfpCommodityResponse], + summary=SUMMARY, +) +async def get_wfp_commodities( + common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)], + db: AsyncSession = Depends(get_db), + code: Annotated[Optional[str], Query(max_length=32, description='Commodity code')] = None, + category: Annotated[Optional[CommodityCategory], Query(description='Commodity category')] = None, + name: Annotated[Optional[str], Query(max_length=512, description='Commodity name')] = None, + output_format: OutputFormat = OutputFormat.JSON, +): + """ + Provide commodity information to use in conjunction with the food-prices endpoint + """ + result = await get_wfp_commodities_srv( + pagination_parameters=common_parameters, db=db, code=code, category=category, name=name + ) + return transform_result_to_csv_stream_if_requested(result, output_format, WfpCommodityResponse) diff --git a/hdx_hapi/endpoints/get_wfp_market.py b/hdx_hapi/endpoints/get_wfp_market.py new file mode 100644 index 00000000..46a4ca0f --- /dev/null +++ b/hdx_hapi/endpoints/get_wfp_market.py @@ -0,0 +1,98 @@ +from typing import Annotated, Optional +from fastapi import Depends, Query, APIRouter + +from sqlalchemy.ext.asyncio import AsyncSession +from hdx_hapi.config.doc_snippets import ( + DOC_ADMIN1_REF, + DOC_ADMIN1_CODE, + DOC_ADMIN1_NAME, + DOC_ADMIN2_REF, + DOC_ADMIN2_CODE, + DOC_ADMIN2_NAME, + DOC_LOCATION_REF, + DOC_LOCATION_CODE, + DOC_LOCATION_NAME, + DOC_SEE_ADMIN1, + DOC_SEE_ADMIN2, + DOC_SEE_LOC, +) + +from hdx_hapi.endpoints.models.base import HapiGenericResponse +from hdx_hapi.endpoints.models.wfp_market import WfpMarketResponse +from hdx_hapi.endpoints.util.util 
import (
+    AdminLevel,
+    CommonEndpointParams,
+    OutputFormat,
+    common_endpoint_parameters,
+)
+from hdx_hapi.services.csv_transform_logic import transform_result_to_csv_stream_if_requested
+from hdx_hapi.services.sql_alchemy_session import get_db
+from hdx_hapi.services.wfp_market_logic import get_wfp_markets_srv
+
+router = APIRouter(
+    tags=['Metadata'],
+)
+
+SUMMARY_TEXT = 'Get the list of WFP markets.'
+
+
+@router.get(
+    '/api/metadata/wfp-market',
+    response_model=HapiGenericResponse[WfpMarketResponse],
+    summary=SUMMARY_TEXT,
+    include_in_schema=False,
+)
+@router.get(
+    '/api/v1/metadata/wfp-market',
+    response_model=HapiGenericResponse[WfpMarketResponse],
+    summary=SUMMARY_TEXT,
+)
+async def get_wfp_markets(
+    common_parameters: Annotated[CommonEndpointParams, Depends(common_endpoint_parameters)],
+    db: AsyncSession = Depends(get_db),
+    code: Annotated[Optional[str], Query(max_length=32, description='Market code')] = None,
+    name: Annotated[Optional[str], Query(max_length=512, description='Market name')] = None,
+    location_ref: Annotated[Optional[int], Query(description=f'{DOC_LOCATION_REF}')] = None,
+    location_code: Annotated[
+        Optional[str], Query(max_length=128, description=f'{DOC_LOCATION_CODE} {DOC_SEE_LOC}')
+    ] = None,
+    location_name: Annotated[
+        Optional[str], Query(max_length=512, description=f'{DOC_LOCATION_NAME} {DOC_SEE_LOC}')
+    ] = None,
+    admin1_ref: Annotated[Optional[int], Query(description=f'{DOC_ADMIN1_REF}')] = None,
+    admin1_code: Annotated[
+        Optional[str], Query(max_length=128, description=f'{DOC_ADMIN1_CODE} {DOC_SEE_ADMIN1}')
+    ] = None,
+    admin1_name: Annotated[
+        Optional[str], Query(max_length=512, description=f'{DOC_ADMIN1_NAME} {DOC_SEE_ADMIN1}')
+    ] = None,
+    admin2_ref: Annotated[Optional[int], Query(description=f'{DOC_ADMIN2_REF}')] = None,
+    admin2_code: Annotated[
+        Optional[str], Query(max_length=128, description=f'{DOC_ADMIN2_CODE} {DOC_SEE_ADMIN2}')
+    ] = None,
+    admin2_name: Annotated[
+        Optional[str], Query(max_length=512, description=f'{DOC_ADMIN2_NAME} {DOC_SEE_ADMIN2}')
+    ] = None,
+    admin_level: Annotated[Optional[AdminLevel], Query(description='Filter the response by admin level')] = None,
+    output_format: OutputFormat = OutputFormat.JSON,
+):
+    """
+    Provide physical market location information to use in conjunction with the food-prices endpoint
+    """
+    result = await get_wfp_markets_srv(
+        pagination_parameters=common_parameters,
+        db=db,
+        code=code,
+        name=name,
+        location_code=location_code,
+        location_name=location_name,
+        admin1_code=admin1_code,
+        admin1_name=admin1_name,
+        location_ref=location_ref,
+        admin2_ref=admin2_ref,
+        admin2_code=admin2_code,
+        admin2_name=admin2_name,
+        admin1_ref=admin1_ref,
+        admin_level=admin_level,
+    )
+    return transform_result_to_csv_stream_if_requested(result, output_format, WfpMarketResponse)
diff --git a/hdx_hapi/endpoints/middleware/__init__.py b/hdx_hapi/endpoints/middleware/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/hdx_hapi/endpoints/middleware/app_identifier_middleware.py b/hdx_hapi/endpoints/middleware/app_identifier_middleware.py
new file mode 100644
index 00000000..ab7c089e
--- /dev/null
+++ b/hdx_hapi/endpoints/middleware/app_identifier_middleware.py
@@ -0,0 +1,59 @@
+from fastapi import Request, status
+from fastapi.responses import JSONResponse
+from pydantic import BaseModel, EmailStr
+
+from hdx_hapi.config.config import get_config
+from hdx_hapi.endpoints.util.util import app_name_identifier_query, email_identifier_query
+
+import base64
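
The middleware continuing below expects the identifier produced by `base64("app_name:email")`, passed either as the `app_identifier` query parameter or in the `X-HDX-HAPI-APP-IDENTIFIER` header, and reverses the encoding with a `b64decode` plus a split on `':'`. A minimal sketch of that round trip (the helper name and sample values are illustrative, not part of this diff):

```python
# Sketch of the app-identifier round trip the middleware relies on;
# encode_app_identifier and the sample values are illustrative.
import base64


def encode_app_identifier(application: str, email: str) -> str:
    """Return base64("application:email"), suitable for the app_identifier
    query parameter or the X-HDX-HAPI-APP-IDENTIFIER header."""
    return base64.b64encode(f'{application}:{email}'.encode('utf-8')).decode('utf-8')


identifier = encode_app_identifier('my_app', 'me@example.org')

# The middleware reverses the encoding and splits on ':'
application, email = base64.b64decode(identifier).decode('utf-8').split(':')
assert (application, email) == ('my_app', 'me@example.org')
```
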
+import logging + + +logger = logging.getLogger(__name__) + +CONFIG = get_config() + + +ALLOWED_API_ENDPOINTS = { + '/api/v1/encode_app_identifier', + '/api/encode_app_identifier', + '/api/v1/util/version', + '/api/util/version', +} + + +# I've tried using the Pydantic model directly in the get_encoded_identifier endpoint, but then the generated OpenAPI +# specs don't include all the details anymore +class IdentifierParams(BaseModel): + application: str = app_name_identifier_query + email: EmailStr = email_identifier_query + + +async def app_identifier_middleware(request: Request, call_next): + """ + Middleware to check for the app_identifier in the request and add it to the request state + """ + if ( + CONFIG.HAPI_IDENTIFIER_FILTERING + and request.url.path.startswith('/api') + and request.url.path not in ALLOWED_API_ENDPOINTS + ): + app_identifier = request.query_params.get('app_identifier') + authorization = request.headers.get('X-HDX-HAPI-APP-IDENTIFIER') + encoded_value = app_identifier or authorization + + if not encoded_value: + return JSONResponse(content={'error': 'Missing app identifier'}, status_code=status.HTTP_400_BAD_REQUEST) + + try: + decoded_value = base64.b64decode(encoded_value).decode('utf-8') + application, email = decoded_value.split(':') + identifier_params = IdentifierParams(application=application, email=email) + logger.warning(f'Application: {application}, Email: {email}') + # Adding the app_name to the request state so it can be accessed in the endpoint + request.state.app_name = identifier_params.application + except Exception: + return JSONResponse(content={'error': 'Invalid app identifier'}, status_code=status.HTTP_400_BAD_REQUEST) + + response = await call_next(request) + return response diff --git a/hdx_hapi/endpoints/middleware/mixpanel_tracking_middleware.py b/hdx_hapi/endpoints/middleware/mixpanel_tracking_middleware.py new file mode 100644 index 00000000..91c372fb --- /dev/null +++ b/hdx_hapi/endpoints/middleware/mixpanel_tracking_middleware.py @@ -0,0 +1,33 @@ +from fastapi import Request, BackgroundTasks + +from hdx_hapi.config.config import get_config +from hdx_hapi.endpoints.middleware.util.util import track_api_call, track_page_view + +import logging + + +logger = logging.getLogger(__name__) + +CONFIG = get_config() + + +async def mixpanel_tracking_middleware(request: Request, call_next): + """ + Middleware to track Mixpanel events + """ + + background_tasks = BackgroundTasks() + + response = await call_next(request) + + + if CONFIG.MIXPANEL: + if request.url.path.startswith('/api'): + background_tasks.add_task(track_api_call, request, response) + elif request.url.path.startswith('/docs'): + background_tasks.add_task(track_page_view, request, response) + else: + logger.warning('HDX_MIXPANEL_TOKEN environment variable is not set.') + response.background = background_tasks + + return response diff --git a/hdx_hapi/endpoints/middleware/util/__init__.py b/hdx_hapi/endpoints/middleware/util/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/hdx_hapi/endpoints/middleware/util/util.py b/hdx_hapi/endpoints/middleware/util/util.py new file mode 100644 index 00000000..05db8640 --- /dev/null +++ b/hdx_hapi/endpoints/middleware/util/util.py @@ -0,0 +1,111 @@ +import logging +import hashlib +import time +import ua_parser.user_agent_parser as useragent +from fastapi import Request, Response + +from hdx_hapi.config.config import get_config + + +logger = logging.getLogger(__name__) + +_CONFIG = get_config() + + +async def track_api_call(request: 
Request, response: Response):
+    current_url = str(request.url)
+    endpoint = request.url.path
+    query_params = list(request.query_params.keys())
+    output_format = request.query_params.get('output_format', '')
+    admin_level = request.query_params.get('admin_level', '')
+
+    app_name = getattr(request.state, 'app_name', None)
+    user_agent_string = request.headers.get('user-agent', '')
+    # Starlette headers are looked up by HTTP header name (case-insensitive),
+    # not by the WSGI environ key 'HTTP_X_REAL_IP'
+    ip_address = request.headers.get('x-real-ip', '')
+
+    response_code = response.status_code
+
+    distinct_id = HashCodeGenerator({'ip': ip_address, 'ua': user_agent_string}).compute_hash()
+    event_time = time.time()
+
+    ua_dict = useragent.Parse(user_agent_string)
+    ua_os = ua_dict.get('os', {}).get('family')
+    ua_browser = ua_dict.get('user_agent', {}).get('family')
+    ua_browser_version = ua_dict.get('user_agent', {}).get('major')
+
+    mixpanel_dict = {
+        'endpoint name': endpoint,
+        'query params': query_params,
+        'time': event_time,
+        'app name': app_name,
+        'output format': output_format,
+        'admin level': admin_level,
+        'server side': True,
+        'response code': response_code,
+        'user agent': user_agent_string,
+        'ip': ip_address,
+        '$os': ua_os,
+        '$browser': ua_browser,
+        '$browser_version': ua_browser_version,
+        '$current_url': current_url,
+    }
+    await send_mixpanel_event('api call', distinct_id, mixpanel_dict)
+
+
+async def track_page_view(request: Request, response: Response):
+    current_url = str(request.url)
+    user_agent_string = request.headers.get('user-agent', '')
+    ip_address = request.headers.get('x-real-ip', '')
+    response_code = response.status_code
+    distinct_id = HashCodeGenerator({'ip': ip_address, 'ua': user_agent_string}).compute_hash()
+    event_time = time.time()
+    ua_dict = useragent.Parse(user_agent_string)
+    ua_os = ua_dict.get('os', {}).get('family')
+    ua_browser = ua_dict.get('user_agent', {}).get('family')
+    ua_browser_version = ua_dict.get('user_agent', {}).get('major')
+
+    page_view_dict = {
+        'page title': 'HAPI - OpenAPI Docs',
+        'time': event_time,
+        'server side': True,
+        'response code': response_code,
+        'user agent': user_agent_string,
+        'ip': ip_address,
+        '$os': ua_os,
+        '$browser': ua_browser,
+        '$browser_version': ua_browser_version,
+        '$current_url': current_url,
+    }
+    await send_mixpanel_event('page view', distinct_id, page_view_dict)
+
+
+async def send_mixpanel_event(event_name: str, distinct_id: str, event_data: dict):
+    _CONFIG.MIXPANEL.track(distinct_id, event_name, event_data)
+
+
+class HashCodeGenerator(object):
+    """
+    Works only on simple dictionaries (not nested). At least the specified fields need to not be nested.
+ """ + + def __init__(self, src_dict, field_list=None): + if not field_list and src_dict: + field_list = list(src_dict.keys()) + + field_list.sort() + try: + self.__inner_string = '' + if field_list and src_dict: + for field in field_list: + self.__inner_string += '{}-{},'.format(field, src_dict.get(field)) + else: + raise Exception('Either field list or source dict are null') + except Exception: + raise Exception('Exception while trying to generate hash code') + + def compute_hash(self): + hash_builder = hashlib.md5() + hash_builder.update(self.__inner_string.encode()) + hash_code = hash_builder.hexdigest() + logger.debug('Generated code for {} is {}'.format(self.__inner_string, hash_code)) + return hash_code diff --git a/hdx_hapi/endpoints/models/admin_level.py b/hdx_hapi/endpoints/models/admin_level.py index f2496ca1..db355a91 100644 --- a/hdx_hapi/endpoints/models/admin_level.py +++ b/hdx_hapi/endpoints/models/admin_level.py @@ -1,3 +1,5 @@ +from datetime import datetime +from typing import Optional from pydantic import ConfigDict, Field from hdx_hapi.endpoints.models.base import HapiBaseModel @@ -7,6 +9,9 @@ class LocationResponse(HapiBaseModel): code: str = Field(max_length=128) name: str = Field(max_length=512) + reference_period_start: datetime + reference_period_end: Optional[datetime] + model_config = ConfigDict(from_attributes=True) @@ -15,6 +20,10 @@ class Admin1Response(HapiBaseModel): # location_ref: int code: str = Field(max_length=128) name: str = Field(max_length=512) + # hapi_updated_date: datetime + # hapi_replaced_date: Optional[datetime] + reference_period_start: datetime + reference_period_end: Optional[datetime] location_code: str = Field(max_length=128) location_name: str = Field(max_length=512) @@ -26,6 +35,10 @@ class Admin2Response(HapiBaseModel): # admin1_ref: int code: str = Field(max_length=128) name: str = Field(max_length=512) + # hapi_updated_date: datetime + # hapi_replaced_date: Optional[datetime] + reference_period_start: datetime + reference_period_end: Optional[datetime] admin1_code: str = Field(max_length=128) admin1_name: str = Field(max_length=512) diff --git a/hdx_hapi/endpoints/models/base.py b/hdx_hapi/endpoints/models/base.py index 2406067e..677cc5e4 100644 --- a/hdx_hapi/endpoints/models/base.py +++ b/hdx_hapi/endpoints/models/base.py @@ -1,7 +1,50 @@ -from typing import List -from pydantic import BaseModel +from typing import Generic, List, Optional, TypeVar +from typing_extensions import Self +from pydantic import BaseModel, ConfigDict, Field, model_validator class HapiBaseModel(BaseModel): def list_of_fields(self) -> List[str]: return list(self.model_fields.keys()) + + +class HapiModelWithAdmins(BaseModel): + location_ref: int + location_code: str = Field(max_length=128) + location_name: str = Field(max_length=512) + + admin1_is_unspecified: bool = Field(exclude=True) + admin2_is_unspecified: bool = Field(exclude=True) + + admin1_ref: int + admin1_code: Optional[str] = Field(max_length=128) + admin1_name: Optional[str] = Field(max_length=512) + admin2_ref: int + admin2_code: Optional[str] = Field(max_length=128) + admin2_name: Optional[str] = Field(max_length=512) + + @model_validator(mode='after') # type: ignore + def set_admin1_admin2_null(self) -> Self: + admin1_is_unspecified = self.admin1_is_unspecified + admin2_is_unspecified = self.admin2_is_unspecified + + # If 'admin1_is_unspecified' is True, set 'admin1_code' and 'admin1_name' to None + if admin1_is_unspecified: + self.admin1_code = None + self.admin1_name = None + + # If 
'admin2_is_unspecified' is True, set 'admin2_code' and 'admin2_name' to None + if admin2_is_unspecified: + self.admin2_code = None + self.admin2_name = None + + return self + + +DataT = TypeVar('DataT') + + +class HapiGenericResponse(BaseModel, Generic[DataT]): + data: List[DataT] + + model_config = ConfigDict(from_attributes=True) diff --git a/hdx_hapi/endpoints/models/conflict_event.py b/hdx_hapi/endpoints/models/conflict_event.py new file mode 100644 index 00000000..72885f04 --- /dev/null +++ b/hdx_hapi/endpoints/models/conflict_event.py @@ -0,0 +1,17 @@ +from hapi_schema.utils.enums import EventType +from pydantic import ConfigDict, Field, NaiveDatetime +from typing import Optional + +from hdx_hapi.endpoints.models.base import HapiBaseModel, HapiModelWithAdmins + + +class ConflictEventResponse(HapiBaseModel, HapiModelWithAdmins): + resource_hdx_id: str = Field(max_length=36) + event_type: EventType + events: Optional[int] + fatalities: Optional[int] + + reference_period_start: NaiveDatetime + reference_period_end: Optional[NaiveDatetime] + + model_config = ConfigDict(from_attributes=True) diff --git a/hdx_hapi/endpoints/models/currency.py b/hdx_hapi/endpoints/models/currency.py new file mode 100644 index 00000000..9f14bfb8 --- /dev/null +++ b/hdx_hapi/endpoints/models/currency.py @@ -0,0 +1,9 @@ +from pydantic import ConfigDict, Field +from hdx_hapi.endpoints.models.base import HapiBaseModel + + +class CurrencyResponse(HapiBaseModel): + code: str = Field(max_length=32) + name: str = Field(max_length=512) + + model_config = ConfigDict(from_attributes=True) diff --git a/hdx_hapi/endpoints/models/demographic.py b/hdx_hapi/endpoints/models/demographic.py deleted file mode 100644 index 4811dca0..00000000 --- a/hdx_hapi/endpoints/models/demographic.py +++ /dev/null @@ -1,19 +0,0 @@ -from pydantic import ConfigDict, Field -from typing import Optional - -from hdx_hapi.endpoints.models.base import HapiBaseModel - - -class AgeRangeResponse(HapiBaseModel): - code: str = Field(max_length=32) - age_min: int = None - age_max: Optional[int] = None - - model_config = ConfigDict(from_attributes=True) - - -class GenderResponse(HapiBaseModel): - code: str = Field(max_length=1) - description: str = Field(max_length=256) - - model_config = ConfigDict(from_attributes=True) diff --git a/hdx_hapi/endpoints/models/encoded_identifier.py b/hdx_hapi/endpoints/models/encoded_identifier.py index cbb7555f..db589f25 100644 --- a/hdx_hapi/endpoints/models/encoded_identifier.py +++ b/hdx_hapi/endpoints/models/encoded_identifier.py @@ -3,4 +3,4 @@ class IdentifierResponse(HapiBaseModel): - encoded_identifier: str = Field(max_length=512) + encoded_app_identifier: str = Field(max_length=512) diff --git a/hdx_hapi/endpoints/models/food_price.py b/hdx_hapi/endpoints/models/food_price.py new file mode 100644 index 00000000..468d0462 --- /dev/null +++ b/hdx_hapi/endpoints/models/food_price.py @@ -0,0 +1,29 @@ +from typing import Optional +from pydantic import ConfigDict, Field, NaiveDatetime +from hapi_schema.utils.enums import CommodityCategory, PriceFlag, PriceType +from hdx_hapi.endpoints.models.base import HapiBaseModel, HapiModelWithAdmins +from hdx_hapi.endpoints.models.util.constants import NON_NEGATIVE_DECIMAL_TYPE + + +class FoodPriceResponse(HapiBaseModel, HapiModelWithAdmins): + resource_hdx_id: str = Field(max_length=36) + + market_code: str = Field(max_length=32) + market_name: str = Field(max_length=512) + commodity_code: str = Field(max_length=32) + commodity_name: str = Field(max_length=512) + 
commodity_category: CommodityCategory + + currency_code: str = Field(max_length=32) + unit: str = Field(max_length=32) + + price_flag: PriceFlag + price_type: PriceType + price: NON_NEGATIVE_DECIMAL_TYPE + lat: float = Field(ge=-90.0, le=90.0) + lon: float = Field(ge=-180.0, le=180.0) + + reference_period_start: NaiveDatetime + reference_period_end: Optional[NaiveDatetime] + + model_config = ConfigDict(from_attributes=True) diff --git a/hdx_hapi/endpoints/models/food_security.py b/hdx_hapi/endpoints/models/food_security.py index b6974ec7..e9915621 100644 --- a/hdx_hapi/endpoints/models/food_security.py +++ b/hdx_hapi/endpoints/models/food_security.py @@ -1,50 +1,17 @@ -from pydantic import ConfigDict, Field, model_validator, NaiveDatetime +from pydantic import ConfigDict, Field, NaiveDatetime from typing import Optional -from hdx_hapi.endpoints.models.base import HapiBaseModel +from hdx_hapi.endpoints.models.base import HapiBaseModel, HapiModelWithAdmins -class FoodSecurityResponse(HapiBaseModel): +class FoodSecurityResponse(HapiBaseModel, HapiModelWithAdmins): + resource_hdx_id: str = Field(max_length=36) + admin2_ref: int = None + ipc_phase: str = Field(max_length=32) + ipc_type: str = Field(max_length=32) population_in_phase: int population_fraction_in_phase: float - - ipc_phase_code: str = Field(max_length=32) - ipc_phase_name: str = Field(max_length=32) - ipc_type_code: str = Field(max_length=32) - reference_period_start: Optional[NaiveDatetime] reference_period_end: Optional[NaiveDatetime] - dataset_hdx_stub: str = Field(max_length=128) - dataset_hdx_provider_stub: str = Field(max_length=128) - resource_hdx_id: str = Field(max_length=36) - - location_code: str = Field(max_length=128) - location_name: str = Field(max_length=512) - - admin1_is_unspecified: bool = Field(exclude=True) - admin2_is_unspecified: bool = Field(exclude=True) - - admin1_code: Optional[str] = Field(max_length=128) - admin1_name: Optional[str] = Field(max_length=512) - admin2_code: Optional[str] = Field(max_length=128) - admin2_name: Optional[str] = Field(max_length=512) - model_config = ConfigDict(from_attributes=True) - - @model_validator(mode='after') - def set_admin1_admin2_null(self) -> 'FoodSecurityResponse': - admin1_is_unspecified = self.admin1_is_unspecified - admin2_is_unspecified = self.admin2_is_unspecified - - # If 'admin1_is_unspecified' is True, set 'admin1_code' and 'admin1_name' to None - if admin1_is_unspecified: - self.admin1_code = None - self.admin1_name = None - - # If 'admin2_is_unspecified' is True, set 'admin2_code' and 'admin2_name' to None - if admin2_is_unspecified: - self.admin2_code = None - self.admin2_name = None - - return self diff --git a/hdx_hapi/endpoints/models/funding.py b/hdx_hapi/endpoints/models/funding.py new file mode 100644 index 00000000..05d4eb72 --- /dev/null +++ b/hdx_hapi/endpoints/models/funding.py @@ -0,0 +1,26 @@ +from pydantic import ConfigDict, Field, NaiveDatetime +from typing import Optional + +from hdx_hapi.endpoints.models.base import HapiBaseModel +from hdx_hapi.endpoints.models.util.constants import NON_NEGATIVE_DECIMAL_TYPE + + +class FundingResponse(HapiBaseModel): + resource_hdx_id: str = Field(max_length=36) + + appeal_code: str = Field(max_length=32) + appeal_name: str = Field(max_length=256) + appeal_type: str = Field(max_length=32) + + requirements_usd: NON_NEGATIVE_DECIMAL_TYPE + funding_usd: NON_NEGATIVE_DECIMAL_TYPE + funding_pct: NON_NEGATIVE_DECIMAL_TYPE + + location_ref: int + location_code: str = Field(max_length=128) + location_name: str 
= Field(max_length=512)
+
+    reference_period_start: NaiveDatetime
+    reference_period_end: Optional[NaiveDatetime]
+
+    model_config = ConfigDict(from_attributes=True)
diff --git a/hdx_hapi/endpoints/models/hdx_metadata.py b/hdx_hapi/endpoints/models/hdx_metadata.py
index 01075307..87e921e5 100644
--- a/hdx_hapi/endpoints/models/hdx_metadata.py
+++ b/hdx_hapi/endpoints/models/hdx_metadata.py
@@ -7,13 +7,15 @@
     get_resource_api_url,
     get_dataset_url,
     get_dataset_api_url,
+    get_organization_url,
+    get_organization_api_url,
 )
 
 
 class DatasetResponse(HapiBaseModel):
-    hdx_id: str = Field(max_length=36)
-    hdx_stub: str = Field(max_length=128)
-    title: str = Field(max_length=1024)
+    dataset_hdx_id: str = Field(max_length=36)
+    dataset_hdx_stub: str = Field(max_length=128)
+    dataset_hdx_title: str = Field(max_length=1024)
 
     hdx_provider_stub: str = Field(max_length=128)
     hdx_provider_name: str = Field(max_length=512)
@@ -22,50 +24,61 @@ class DatasetResponse(HapiBaseModel):
     @computed_field
     @property
     def hdx_link(self) -> HttpUrl:
-        return get_dataset_url(dataset_id=self.hdx_id)
+        return get_dataset_url(dataset_id=self.dataset_hdx_id)
 
     @computed_field
     @property
     def hdx_api_link(self) -> HttpUrl:
-        return get_dataset_api_url(dataset_id=self.hdx_id)
+        return get_dataset_api_url(dataset_id=self.dataset_hdx_id)
+
+    @computed_field
+    @property
+    def provider_hdx_link(self) -> HttpUrl:
+        return get_organization_url(org_id=self.hdx_provider_stub)
+
+    @computed_field
+    @property
+    def provider_hdx_api_link(self) -> HttpUrl:
+        return get_organization_api_url(org_id=self.hdx_provider_stub)
 
     model_config = ConfigDict(from_attributes=True)
 
     def list_of_fields(self) -> List[str]:
         fields = super().list_of_fields()
-        fields.extend(['hdx_link', 'api_link'])
+        fields.extend(['hdx_link', 'api_link', 'provider_hdx_link', 'provider_hdx_api_link'])
         return fields
 
+
 class ResourceResponse(HapiBaseModel):
     # id: int
-    hdx_id: str = Field(max_length=36)
+    resource_hdx_id: str = Field(max_length=36)
+    dataset_hdx_id: str = Field(max_length=36)
     name: str = Field(max_length=256)
     format: str = Field(max_length=32)
     update_date: datetime
     is_hxl: bool
     download_url: HttpUrl
+    hapi_updated_date: datetime
 
-    dataset_hdx_id: str = Field(max_length=36)
     dataset_hdx_stub: str = Field(max_length=128)
-
-    dataset_title: str = Field(max_length=1024)
+
+    dataset_hdx_title: str = Field(max_length=1024)
     dataset_hdx_provider_stub: str = Field(max_length=128)
     dataset_hdx_provider_name: str = Field(max_length=512)
 
-    # computed fields
     @computed_field
     @property
     def hdx_link(self) -> HttpUrl:
-        return get_resource_url(dataset_id=self.dataset_hdx_id, resource_id=self.hdx_id)
+        return get_resource_url(dataset_id=self.dataset_hdx_id, resource_id=self.resource_hdx_id)
 
     @computed_field
     @property
     def hdx_api_link(self) -> HttpUrl:
-        return get_resource_api_url(resource_id=self.hdx_id)
+        return get_resource_api_url(resource_id=self.resource_hdx_id)
 
     @computed_field
     @property
@@ -77,9 +90,19 @@ def dataset_hdx_link(self) -> HttpUrl:
     def dataset_hdx_api_link(self) -> HttpUrl:
         return get_dataset_api_url(dataset_id=self.dataset_hdx_id)
 
+    @computed_field
+    @property
+    def provider_hdx_link(self) -> HttpUrl:
+        return get_organization_url(org_id=self.dataset_hdx_provider_stub)
+
+    @computed_field
+    @property
+    def provider_hdx_api_link(self) -> HttpUrl:
+        return get_organization_api_url(org_id=self.dataset_hdx_provider_stub)
+
     model_config = ConfigDict(from_attributes=True)
 
     def list_of_fields(self) -> List[str]:
         fields = 
super().list_of_fields() - fields.extend(['hdx_link', 'api_link', 'dataset_hdx_link', 'dataset_api_link']) + fields.extend(['hdx_link', 'api_link', 'dataset_hdx_link', 'dataset_hdx_api_link']) return fields diff --git a/hdx_hapi/endpoints/models/humanitarian_needs.py b/hdx_hapi/endpoints/models/humanitarian_needs.py index ffba8815..fa6c1a52 100644 --- a/hdx_hapi/endpoints/models/humanitarian_needs.py +++ b/hdx_hapi/endpoints/models/humanitarian_needs.py @@ -1,52 +1,23 @@ -from pydantic import ConfigDict, Field, model_validator, NaiveDatetime +from pydantic import ConfigDict, Field, NaiveDatetime from typing import Optional -from hdx_hapi.endpoints.models.base import HapiBaseModel +from hdx_hapi.endpoints.models.base import HapiBaseModel, HapiModelWithAdmins +from hapi_schema.utils.enums import Gender, PopulationGroup, PopulationStatus, DisabledMarker -class HumanitarianNeedsResponse(HapiBaseModel): - gender_code: Optional[str] = Field(max_length=1) - age_range_code: Optional[str] = Field(max_length=32) - disabled_marker: Optional[bool] = None - sector_code: Optional[str] = Field(max_length=32) - sector_name: Optional[str] = Field(max_length=512) - population_status_code: Optional[str] = Field(max_length=32) - population_group_code: Optional[str] = Field(max_length=32) - population: int = None - - reference_period_start: Optional[NaiveDatetime] - reference_period_end: Optional[NaiveDatetime] - - dataset_hdx_stub: str = Field(max_length=128) - dataset_hdx_provider_stub: str = Field(max_length=128) +class HumanitarianNeedsResponse(HapiBaseModel, HapiModelWithAdmins): resource_hdx_id: str = Field(max_length=36) - - location_code: str = Field(max_length=128) - location_name: str = Field(max_length=512) - - admin1_is_unspecified: bool = Field(exclude=True) - admin2_is_unspecified: bool = Field(exclude=True) - - admin1_code: Optional[str] = Field(max_length=128) - admin1_name: Optional[str] = Field(max_length=512) - admin2_code: Optional[str] = Field(max_length=128) - admin2_name: Optional[str] = Field(max_length=512) + gender: Gender + age_range: str = Field(max_length=32) + min_age: Optional[int] = Field(ge=0) + max_age: Optional[int] = Field(ge=0) + disabled_marker: DisabledMarker + sector_code: str = Field(max_length=32) + population_group: PopulationGroup + population_status: PopulationStatus + population: int = Field(ge=0) + reference_period_start: NaiveDatetime + reference_period_end: Optional[NaiveDatetime] + sector_name: Optional[str] = Field(max_length=512) model_config = ConfigDict(from_attributes=True) - - @model_validator(mode='after') - def set_admin1_admin2_null(self) -> 'HumanitarianNeedsResponse': - admin1_is_unspecified = self.admin1_is_unspecified - admin2_is_unspecified = self.admin2_is_unspecified - - # If 'admin1_is_unspecified' is True, set 'admin1_code' and 'admin1_name' to None - if admin1_is_unspecified: - self.admin1_code = None - self.admin1_name = None - - # If 'admin2_is_unspecified' is True, set 'admin2_code' and 'admin2_name' to None - if admin2_is_unspecified: - self.admin2_code = None - self.admin2_name = None - - return self diff --git a/hdx_hapi/endpoints/models/national_risk.py b/hdx_hapi/endpoints/models/national_risk.py index 07093ba8..258c8a9d 100644 --- a/hdx_hapi/endpoints/models/national_risk.py +++ b/hdx_hapi/endpoints/models/national_risk.py @@ -1,26 +1,32 @@ +from hapi_schema.utils.enums import RiskClass from pydantic import ConfigDict, Field, NaiveDatetime from typing import Optional from hdx_hapi.endpoints.models.base import HapiBaseModel +from 
hdx_hapi.endpoints.models.util.constants import PERCENTAGE_TYPE + +RISK_TYPE = Field(ge=0, le=10) class NationalRiskResponse(HapiBaseModel): - risk_class: int - global_rank: int - overall_risk: float - hazard_exposure_risk: float - vulnerability_risk: float - coping_capacity_risk: float + risk_class: RiskClass + global_rank: int = Field(ge=1, le=250) + overall_risk: float = RISK_TYPE + hazard_exposure_risk: float = RISK_TYPE + vulnerability_risk: float = RISK_TYPE + coping_capacity_risk: float = RISK_TYPE - meta_missing_indicators_pct: Optional[float] = None - meta_avg_recentness_years: Optional[float] = None + meta_missing_indicators_pct: Optional[float] = PERCENTAGE_TYPE + meta_avg_recentness_years: Optional[float] = Field(ge=0) reference_period_start: Optional[NaiveDatetime] reference_period_end: Optional[NaiveDatetime] - dataset_hdx_stub: str = Field(max_length=128) - dataset_hdx_provider_stub: str = Field(max_length=128) + # dataset_hdx_stub: str = Field(max_length=128) + # dataset_hdx_provider_stub: str = Field(max_length=128) resource_hdx_id: str = Field(max_length=36) + # hapi_updated_date: datetime + # hapi_replaced_date: Optional[datetime] # sector_name: str = Field(max_length=512) diff --git a/hdx_hapi/endpoints/models/operational_presence.py b/hdx_hapi/endpoints/models/operational_presence.py index 17af8a1e..0f3f683c 100644 --- a/hdx_hapi/endpoints/models/operational_presence.py +++ b/hdx_hapi/endpoints/models/operational_presence.py @@ -1,29 +1,23 @@ -from pydantic import ConfigDict, Field, model_validator, NaiveDatetime +from pydantic import ConfigDict, Field, NaiveDatetime from typing import Optional -from hdx_hapi.endpoints.models.base import HapiBaseModel +from hdx_hapi.endpoints.models.base import HapiBaseModel, HapiModelWithAdmins -class OperationalPresenceResponse(HapiBaseModel): - sector_code: str = Field(max_length=32) - dataset_hdx_stub: str = Field(max_length=128) +class OperationalPresenceResponse(HapiBaseModel, HapiModelWithAdmins): + # dataset_hdx_stub: str = Field(max_length=128) resource_hdx_id: str = Field(max_length=36) org_acronym: str = Field(max_length=32) org_name: str = Field(max_length=512) + sector_code: str = Field(max_length=32) sector_name: str = Field(max_length=512) - location_code: str = Field(max_length=128) - location_name: str = Field(max_length=512) - reference_period_start: Optional[NaiveDatetime] + reference_period_start: NaiveDatetime reference_period_end: Optional[NaiveDatetime] - admin1_is_unspecified: bool = Field(exclude=True) - admin2_is_unspecified: bool = Field(exclude=True) + # hapi_updated_date: datetime + # hapi_replaced_date: Optional[datetime] - admin1_code: Optional[str] = Field(max_length=128) - admin1_name: Optional[str] = Field(max_length=512) - admin2_code: Optional[str] = Field(max_length=128) - admin2_name: Optional[str] = Field(max_length=512) # resource_update_date: datetime # org_ref: int = None, # dataset_hdx_id: str = Field(max_length=36), @@ -31,25 +25,7 @@ class OperationalPresenceResponse(HapiBaseModel): # dataset_hdx_provider_stub: str = Field(max_length=128), # dataset_hdx_provider_name: str = Field(max_length=512), # resource_name: str = Field(max_length=256), - # org_type_code: str = Field(max_length=32), - # org_type_description: str = Field(max_length=512), - + org_type_code: Optional[str] = Field(max_length=32) + org_type_description: Optional[str] = Field(max_length=512) model_config = ConfigDict(from_attributes=True) - - @model_validator(mode='after') - def set_admin1_admin2_null(self) -> 
'OperationalPresenceResponse': - admin1_is_unspecified = self.admin1_is_unspecified - admin2_is_unspecified = self.admin2_is_unspecified - - # If 'admin1_is_unspecified' is True, set 'admin1_code' and 'admin1_name' to None - if admin1_is_unspecified: - self.admin1_code = None - self.admin1_name = None - - # If 'admin2_is_unspecified' is True, set 'admin2_code' and 'admin2_name' to None - if admin2_is_unspecified: - self.admin2_code = None - self.admin2_name = None - - return self diff --git a/hdx_hapi/endpoints/models/population.py b/hdx_hapi/endpoints/models/population.py index 3736e4a0..00c8af5a 100644 --- a/hdx_hapi/endpoints/models/population.py +++ b/hdx_hapi/endpoints/models/population.py @@ -1,45 +1,22 @@ -from pydantic import ConfigDict, Field, model_validator, NaiveDatetime +from pydantic import ConfigDict, Field, NaiveDatetime from typing import Optional -from hdx_hapi.endpoints.models.base import HapiBaseModel +from hapi_schema.utils.enums import Gender +from hdx_hapi.endpoints.models.base import HapiBaseModel, HapiModelWithAdmins -class PopulationResponse(HapiBaseModel): - gender_code: Optional[str] = Field(max_length=1) - age_range_code: Optional[str] = Field(max_length=32) +class PopulationResponse(HapiBaseModel, HapiModelWithAdmins): + resource_hdx_id: str = Field(max_length=36) + admin2_ref: int = None + + gender: Optional[Gender] = Field() + age_range: Optional[str] = Field(max_length=32) + + min_age: Optional[int] + max_age: Optional[int] population: int reference_period_start: Optional[NaiveDatetime] reference_period_end: Optional[NaiveDatetime] - dataset_hdx_stub: str = Field(max_length=128) - resource_hdx_id: str = Field(max_length=36) - location_code: str = Field(max_length=128) - location_name: str = Field(max_length=512) - - admin1_is_unspecified: bool = Field(exclude=True) - admin2_is_unspecified: bool = Field(exclude=True) - - admin1_code: Optional[str] = Field(max_length=128) - admin1_name: Optional[str] = Field(max_length=512) - admin2_code: Optional[str] = Field(max_length=128) - admin2_name: Optional[str] = Field(max_length=512) - model_config = ConfigDict(from_attributes=True) - - @model_validator(mode='after') - def set_admin1_admin2_null(self) -> 'PopulationResponse': - admin1_is_unspecified = self.admin1_is_unspecified - admin2_is_unspecified = self.admin2_is_unspecified - - # If 'admin1_is_unspecified' is True, set 'admin1_code' and 'admin1_name' to None - if admin1_is_unspecified: - self.admin1_code = None - self.admin1_name = None - - # If 'admin2_is_unspecified' is True, set 'admin2_code' and 'admin2_name' to None - if admin2_is_unspecified: - self.admin2_code = None - self.admin2_name = None - - return self diff --git a/hdx_hapi/endpoints/models/population_profile.py b/hdx_hapi/endpoints/models/population_profile.py deleted file mode 100644 index 0e83b104..00000000 --- a/hdx_hapi/endpoints/models/population_profile.py +++ /dev/null @@ -1,17 +0,0 @@ -from pydantic import ConfigDict, Field - -from hdx_hapi.endpoints.models.base import HapiBaseModel - - -class PopulationGroupResponse(HapiBaseModel): - code: str = Field(max_length=32) - description: str = Field(max_length=512) - - model_config = ConfigDict(from_attributes=True) - - -class PopulationStatusResponse(HapiBaseModel): - code: str = Field(max_length=32) - description: str = Field(max_length=512) - - model_config = ConfigDict(from_attributes=True) diff --git a/hdx_hapi/endpoints/models/poverty_rate.py b/hdx_hapi/endpoints/models/poverty_rate.py new file mode 100644 index 00000000..07f88d07 --- 
/dev/null +++ b/hdx_hapi/endpoints/models/poverty_rate.py @@ -0,0 +1,23 @@ +from pydantic import ConfigDict, Field, NaiveDatetime +from typing import Optional + +from hdx_hapi.endpoints.models.base import HapiBaseModel + + +class PovertyRateResponse(HapiBaseModel): + resource_hdx_id: str = Field(max_length=36) + + mpi: float + headcount_ratio: float + intensity_of_deprivation: float + vulnerable_to_poverty: float + in_severe_poverty: float + + reference_period_start: Optional[NaiveDatetime] + reference_period_end: Optional[NaiveDatetime] + + location_code: str = Field(max_length=128) + location_name: str = Field(max_length=512) + admin1_name: Optional[str] = Field(max_length=512) + + model_config = ConfigDict(from_attributes=True) diff --git a/hdx_hapi/endpoints/models/refugees.py b/hdx_hapi/endpoints/models/refugees.py new file mode 100644 index 00000000..8cc42206 --- /dev/null +++ b/hdx_hapi/endpoints/models/refugees.py @@ -0,0 +1,25 @@ +from pydantic import ConfigDict, Field, NaiveDatetime +from typing import Optional + +from hdx_hapi.endpoints.models.base import HapiBaseModel +from hapi_schema.utils.enums import Gender, PopulationGroup + + +class RefugeesResponse(HapiBaseModel): + resource_hdx_id: str = Field(max_length=36) + origin_location_ref: int + asylum_location_ref: int + population_group: PopulationGroup + gender: Gender + age_range: str = Field(max_length=32) + min_age: Optional[int] = Field(ge=0) + max_age: Optional[int] = Field(ge=0) + population: int = Field(ge=0) + reference_period_start: NaiveDatetime + reference_period_end: Optional[NaiveDatetime] + origin_location_code: str = Field(max_length=128) + origin_location_name: str = Field(max_length=512) + asylum_location_code: str = Field(max_length=128) + asylum_location_name: str = Field(max_length=512) + + model_config = ConfigDict(from_attributes=True) diff --git a/hdx_hapi/endpoints/models/util/__init__.py b/hdx_hapi/endpoints/models/util/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/hdx_hapi/endpoints/models/util/constants.py b/hdx_hapi/endpoints/models/util/constants.py new file mode 100644 index 00000000..2e1df41c --- /dev/null +++ b/hdx_hapi/endpoints/models/util/constants.py @@ -0,0 +1,11 @@ +from decimal import Decimal +from typing import Annotated +from fastapi import Query +from pydantic import Field, PlainSerializer + + +PERCENTAGE_TYPE = Field(ge=0, le=100) + +NON_NEGATIVE_DECIMAL_TYPE = Annotated[ + Decimal, PlainSerializer(lambda x: float(x), return_type=Annotated[float, Query(ge=0)]) +] diff --git a/hdx_hapi/endpoints/models/version.py b/hdx_hapi/endpoints/models/version.py new file mode 100644 index 00000000..beda8a74 --- /dev/null +++ b/hdx_hapi/endpoints/models/version.py @@ -0,0 +1,7 @@ +from pydantic import Field +from hdx_hapi.endpoints.models.base import HapiBaseModel + + +class VersionResponse(HapiBaseModel): + api_version: str = Field(max_length=16) + hapi_sqlalchemy_schema_version: str = Field(max_length=16) diff --git a/hdx_hapi/endpoints/models/wfp_commodity.py b/hdx_hapi/endpoints/models/wfp_commodity.py new file mode 100644 index 00000000..5f8947f5 --- /dev/null +++ b/hdx_hapi/endpoints/models/wfp_commodity.py @@ -0,0 +1,11 @@ +from hapi_schema.utils.enums import CommodityCategory +from pydantic import ConfigDict, Field +from hdx_hapi.endpoints.models.base import HapiBaseModel + + +class WfpCommodityResponse(HapiBaseModel): + code: str = Field(max_length=32) + category: CommodityCategory + name: str = Field(max_length=512) + + model_config = 
ConfigDict(from_attributes=True) diff --git a/hdx_hapi/endpoints/models/wfp_market.py b/hdx_hapi/endpoints/models/wfp_market.py new file mode 100644 index 00000000..2bf482fd --- /dev/null +++ b/hdx_hapi/endpoints/models/wfp_market.py @@ -0,0 +1,12 @@ +from pydantic import ConfigDict, Field +from hdx_hapi.endpoints.models.base import HapiBaseModel, HapiModelWithAdmins + + +class WfpMarketResponse(HapiBaseModel, HapiModelWithAdmins): + code: str = Field(max_length=32) + name: str = Field(max_length=512) + + lat: float = Field(ge=-90.0, le=90.0) + lon: float = Field(ge=-180.0, le=180.0) + + model_config = ConfigDict(from_attributes=True) diff --git a/hdx_hapi/endpoints/util/util.py b/hdx_hapi/endpoints/util/util.py index a4cdbdbc..424d5630 100644 --- a/hdx_hapi/endpoints/util/util.py +++ b/hdx_hapi/endpoints/util/util.py @@ -1,20 +1,87 @@ +from datetime import date from enum import Enum -from typing import Annotated +from typing import Annotated, Optional -from fastapi import Query +from fastapi import Depends, Query +from pydantic import BaseModel, ConfigDict, NaiveDatetime _LIMIT_DESCRIPTION = 'Maximum number of records to return. The system will not return more than 10,000 records.' _OFFSET_DESCRIPTION = ( - 'Number of records to skip in the response. Use in conjunction with the limit parameter ' 'to paginate.' + 'Number of records to skip in the response. Use in conjunction with the limit parameter to paginate.' ) +_APP_IDENTIFIER_DESCRIPTION = ( + 'base64 encoded application name and email, as in `base64("app_name:email")`. ' + 'This value can also be passed in the `X-HDX-HAPI-APP-IDENTIFIER` header. ' + 'See the *encoded_app_identifier* endpoint.' +) + +app_name_identifier_query = Query(max_length=512, min_length=4, description='A name for the calling application') +email_identifier_query = Query(max_length=512, description='An email address') + +pagination_limit_query = Query(ge=0, le=10000, example=100, description=_LIMIT_DESCRIPTION) +pagination_offset_query = Query(ge=0, description=_OFFSET_DESCRIPTION) +common_app_identifier_query = Query(max_length=512, description=_APP_IDENTIFIER_DESCRIPTION) + + +class PaginationParams(BaseModel): + offset: int = pagination_offset_query + limit: int = pagination_limit_query + + model_config = ConfigDict(frozen=True) + + +class CommonEndpointParams(PaginationParams): + app_identifier: Optional[str] = common_app_identifier_query async def pagination_parameters( - limit: Annotated[int, Query(ge=0, le=10000, example=1000, description=_LIMIT_DESCRIPTION)] = 10000, - offset: Annotated[int, Query(ge=0, description=_OFFSET_DESCRIPTION)] = 0, -): - return {'offset': offset, 'limit': limit} + limit: Annotated[int, pagination_limit_query] = 10000, + offset: Annotated[int, pagination_offset_query] = 0, +) -> PaginationParams: + return PaginationParams(offset=offset, limit=limit) + + +async def common_endpoint_parameters( + pagination_parameters: Annotated[PaginationParams, Depends(pagination_parameters)], + app_identifier: Annotated[str, common_app_identifier_query] = None, +) -> CommonEndpointParams: + return CommonEndpointParams(**pagination_parameters.model_dump(), app_identifier=app_identifier) + + +class ReferencePeriodParameters(BaseModel): + reference_period_start_min: Optional[NaiveDatetime | date] = None + reference_period_start_max: Optional[NaiveDatetime | date] = None + reference_period_end_min: Optional[NaiveDatetime | date] = None + reference_period_end_max: Optional[NaiveDatetime | date] = None + + model_config = ConfigDict(frozen=True) + + 
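The `app_identifier` accepted by `common_endpoint_parameters` above is, as `_APP_IDENTIFIER_DESCRIPTION` states, just `base64("app_name:email")`. A minimal client-side sketch of the encoding; the application name, email, and request path below are illustrative, while the parameter name, header name, and `limit`/`offset` semantics come from the code above:

```python
import base64


def encode_app_identifier(app_name: str, email: str) -> str:
    """Return base64("app_name:email") as expected by HDX HAPI."""
    return base64.b64encode(f'{app_name}:{email}'.encode('utf-8')).decode('ascii')


identifier = encode_app_identifier('my_app', 'user@example.org')  # hypothetical values
# Pass it either as a query parameter:
#   GET /api/population?app_identifier=<identifier>&limit=100&offset=0   (path is illustrative)
# or as a header:
#   X-HDX-HAPI-APP-IDENTIFIER: <identifier>
```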
+async def reference_period_parameters( + reference_period_start_min: Annotated[ + NaiveDatetime | date, + Query(description='Min date of reference start date, e.g. 2020-01-01 or 2020-01-01T00:00:00'), + ] = None, + reference_period_start_max: Annotated[ + NaiveDatetime | date, + Query(description='Max date of reference start date, e.g. 2020-01-01 or 2020-01-01T00:00:00'), + ] = None, + reference_period_end_min: Annotated[ + NaiveDatetime | date, + Query(description='Min date of reference end date, e.g. 2020-01-01 or 2020-01-01T00:00:00'), + ] = None, + reference_period_end_max: Annotated[ + NaiveDatetime | date, + Query(description='Max date of reference end date, e.g. 2020-01-01 or 2020-01-01T00:00:00'), + ] = None, +) -> ReferencePeriodParameters: + return ReferencePeriodParameters( + reference_period_start_min=reference_period_start_min, + reference_period_start_max=reference_period_start_max, + reference_period_end_min=reference_period_end_min, + reference_period_end_max=reference_period_end_max, + ) class OutputFormat(str, Enum): diff --git a/hdx_hapi/endpoints/util/version.py b/hdx_hapi/endpoints/util/version.py new file mode 100644 index 00000000..8e16b6d6 --- /dev/null +++ b/hdx_hapi/endpoints/util/version.py @@ -0,0 +1,2 @@ +api_version = '0.3.0' +hapi_sqlalchemy_schema_version = '0.8.9' diff --git a/hdx_hapi/services/admin1_logic.py b/hdx_hapi/services/admin1_logic.py index b8068d5d..6f13f8a7 100644 --- a/hdx_hapi/services/admin1_logic.py +++ b/hdx_hapi/services/admin1_logic.py @@ -1,13 +1,12 @@ -from typing import Dict - from sqlalchemy.ext.asyncio import AsyncSession from hdx_hapi.db.dao.admin1_view_dao import admin1_view_list - +from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters async def get_admin1_srv( - pagination_parameters: Dict, + pagination_parameters: PaginationParams, + ref_period_parameters: ReferencePeriodParameters, db: AsyncSession, code: str = None, name: str = None, @@ -16,9 +15,10 @@ async def get_admin1_srv( ): return await admin1_view_list( pagination_parameters=pagination_parameters, + ref_period_parameters=ref_period_parameters, db=db, code=code, name=name, location_code=location_code, - location_name=location_name + location_name=location_name, ) diff --git a/hdx_hapi/services/admin2_logic.py b/hdx_hapi/services/admin2_logic.py index 0ab52ef3..3575092f 100644 --- a/hdx_hapi/services/admin2_logic.py +++ b/hdx_hapi/services/admin2_logic.py @@ -1,13 +1,12 @@ -from typing import Dict - from sqlalchemy.ext.asyncio import AsyncSession from hdx_hapi.db.dao.admin2_view_dao import admin2_view_list - +from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters async def get_admin2_srv( - pagination_parameters: Dict, + pagination_parameters: PaginationParams, + ref_period_parameters: ReferencePeriodParameters, db: AsyncSession, code: str = None, name: str = None, @@ -18,6 +17,7 @@ async def get_admin2_srv( ): return await admin2_view_list( pagination_parameters=pagination_parameters, + ref_period_parameters=ref_period_parameters, db=db, code=code, name=name, diff --git a/hdx_hapi/services/admin_level_logic.py b/hdx_hapi/services/admin_level_logic.py index 154f6bc6..1fa34770 100644 --- a/hdx_hapi/services/admin_level_logic.py +++ b/hdx_hapi/services/admin_level_logic.py @@ -1,7 +1,8 @@ +from typing import Optional from hdx_hapi.endpoints.util.util import AdminLevel -def compute_unspecified_values(admin_level: AdminLevel): +def compute_unspecified_values(admin_level: Optional[AdminLevel]): """ Compute 
unspecified values for admin1 and admin2 """ diff --git a/hdx_hapi/services/age_range_logic.py b/hdx_hapi/services/age_range_logic.py deleted file mode 100644 index c8bedc1c..00000000 --- a/hdx_hapi/services/age_range_logic.py +++ /dev/null @@ -1,17 +0,0 @@ -from typing import Dict - -from sqlalchemy.ext.asyncio import AsyncSession - -from hdx_hapi.db.dao.age_range_view_dao import age_ranges_view_list - - -async def get_age_ranges_srv( - pagination_parameters: Dict, - db: AsyncSession, - code: str = None, -): - return await age_ranges_view_list( - pagination_parameters=pagination_parameters, - db=db, - code=code, - ) diff --git a/hdx_hapi/services/conflict_view_logic.py b/hdx_hapi/services/conflict_view_logic.py new file mode 100644 index 00000000..c46cc722 --- /dev/null +++ b/hdx_hapi/services/conflict_view_logic.py @@ -0,0 +1,45 @@ +from typing import Optional, Sequence +from hapi_schema.utils.enums import EventType +from sqlalchemy.ext.asyncio import AsyncSession + +from hdx_hapi.db.dao.conflict_event_view_dao import conflict_event_view_list +from hdx_hapi.db.models.views.all_views import ConflictEventView +from hdx_hapi.endpoints.util.util import AdminLevel, PaginationParams, ReferencePeriodParameters +from hdx_hapi.services.admin_level_logic import compute_unspecified_values + + +async def get_conflict_event_srv( + pagination_parameters: PaginationParams, + ref_period_parameters: ReferencePeriodParameters, + db: AsyncSession, + event_type: Optional[EventType] = None, + location_ref: Optional[int] = None, + location_code: Optional[str] = None, + location_name: Optional[str] = None, + admin1_ref: Optional[int] = None, + admin1_code: Optional[str] = None, + admin1_name: Optional[str] = None, + admin2_ref: Optional[int] = None, + admin2_code: Optional[str] = None, + admin2_name: Optional[str] = None, + admin_level: Optional[AdminLevel] = None, +) -> Sequence[ConflictEventView]: + admin1_is_unspecified, admin2_is_unspecified = compute_unspecified_values(admin_level) + + return await conflict_event_view_list( + pagination_parameters=pagination_parameters, + ref_period_parameters=ref_period_parameters, + db=db, + event_type=event_type, + location_ref=location_ref, + location_code=location_code, + location_name=location_name, + admin1_ref=admin1_ref, + admin1_code=admin1_code, + admin1_name=admin1_name, + admin1_is_unspecified=admin1_is_unspecified, + admin2_ref=admin2_ref, + admin2_code=admin2_code, + admin2_name=admin2_name, + admin2_is_unspecified=admin2_is_unspecified, + ) diff --git a/hdx_hapi/services/csv_transform_logic.py b/hdx_hapi/services/csv_transform_logic.py index 75889824..85113458 100644 --- a/hdx_hapi/services/csv_transform_logic.py +++ b/hdx_hapi/services/csv_transform_logic.py @@ -1,7 +1,7 @@ import csv import io -from typing import Dict, List, Type +from typing import Dict, Sequence, Type from fastapi.responses import StreamingResponse from hdx_hapi.endpoints.models.base import HapiBaseModel @@ -13,17 +13,16 @@ def transform_result_to_csv_stream_if_requested( - result: List[Dict], - output_format: OutputFormat, - pydantic_class: Type[HapiBaseModel] - ) -> List[Dict] | StreamingResponse: + result: Sequence[Dict], output_format: OutputFormat, pydantic_class: Type[HapiBaseModel] +) -> Dict[str, Sequence] | StreamingResponse: """ Transforms the result to a CSV stream if requested. 
Otherwise, returns the result as is """ if output_format == OutputFormat.CSV: if result: - def iter_csv(): + + def iter_csv(): pydantic_instance = pydantic_class.model_validate(result[0]) keys = pydantic_instance.list_of_fields() items_per_row = len(keys) @@ -47,12 +46,11 @@ def iter_csv(): break csv_row = str_as_file.getvalue() - yield csv_row + yield csv_row response = StreamingResponse(iter_csv(), media_type='text/csv') response.headers['Content-Disposition'] = 'attachment; filename=results.csv' return response return StreamingResponse(iter([]), media_type='text/csv') - return result - + return {'data': result} diff --git a/hdx_hapi/services/currency_logic.py b/hdx_hapi/services/currency_logic.py new file mode 100644 index 00000000..5c11580f --- /dev/null +++ b/hdx_hapi/services/currency_logic.py @@ -0,0 +1,17 @@ +from typing import Optional +from sqlalchemy.ext.asyncio import AsyncSession + +from hdx_hapi.db.dao.currency_view_dao import currencies_view_list +from hdx_hapi.endpoints.util.util import PaginationParams + + +async def get_currencies_srv( + pagination_parameters: PaginationParams, + db: AsyncSession, + code: Optional[str] = None, +): + return await currencies_view_list( + pagination_parameters=pagination_parameters, + db=db, + code=code, + ) diff --git a/hdx_hapi/services/dataset_logic.py b/hdx_hapi/services/dataset_logic.py index 22599c01..48cf2d3a 100644 --- a/hdx_hapi/services/dataset_logic.py +++ b/hdx_hapi/services/dataset_logic.py @@ -1,20 +1,25 @@ -from typing import Dict - +from typing import Optional from sqlalchemy.ext.asyncio import AsyncSession from hdx_hapi.db.dao.dataset_view_dao import datasets_view_list +from hdx_hapi.endpoints.util.util import PaginationParams async def get_datasets_srv( - pagination_parameters: Dict, + pagination_parameters: PaginationParams, db: AsyncSession, - hdx_id: str, - hdx_stub: str, - title: str = None, - hdx_provider_stub: str = None, - hdx_provider_name: str = None, + dataset_hdx_id: Optional[str] = None, + dataset_hdx_stub: Optional[str] = None, + dataset_hdx_title: Optional[str] = None, + hdx_provider_stub: Optional[str] = None, + hdx_provider_name: Optional[str] = None, ): return await datasets_view_list( - pagination_parameters=pagination_parameters, db=db, hdx_id=hdx_id, hdx_stub=hdx_stub, title=title, - hdx_provider_stub=hdx_provider_stub, hdx_provider_name=hdx_provider_name + pagination_parameters=pagination_parameters, + db=db, + dataset_hdx_id=dataset_hdx_id, + dataset_hdx_stub=dataset_hdx_stub, + dataset_hdx_title=dataset_hdx_title, + hdx_provider_stub=hdx_provider_stub, + hdx_provider_name=hdx_provider_name, ) diff --git a/hdx_hapi/services/food_price_logic.py b/hdx_hapi/services/food_price_logic.py new file mode 100644 index 00000000..c8ceb127 --- /dev/null +++ b/hdx_hapi/services/food_price_logic.py @@ -0,0 +1,62 @@ +from decimal import Decimal +from typing import Optional, Sequence +from hapi_schema.utils.enums import CommodityCategory, PriceFlag, PriceType +from sqlalchemy.ext.asyncio import AsyncSession + +from hdx_hapi.db.dao.food_price_dao import food_price_view_list +from hdx_hapi.db.models.views.all_views import FoodPriceView +from hdx_hapi.endpoints.util.util import AdminLevel, PaginationParams +from hdx_hapi.services.admin_level_logic import compute_unspecified_values + + +async def get_food_prices_srv( + pagination_parameters: PaginationParams, + db: AsyncSession, + market_code: Optional[str] = None, + market_name: Optional[str] = None, + commodity_code: Optional[str] = None, + commodity_category: 
Optional[CommodityCategory] = None, + commodity_name: Optional[str] = None, + price_flag: Optional[PriceFlag] = None, + price_type: Optional[PriceType] = None, + price_min: Optional[Decimal] = None, + price_max: Optional[Decimal] = None, + # lat: Optional[float] = None, + # lon: Optional[float] = None, + location_code: Optional[str] = None, + location_name: Optional[str] = None, + admin1_ref: Optional[int] = None, + admin1_code: Optional[str] = None, + admin1_name: Optional[str] = None, + location_ref: Optional[int] = None, + admin2_ref: Optional[int] = None, + admin2_code: Optional[str] = None, + admin2_name: Optional[str] = None, + admin_level: Optional[AdminLevel] = None, +) -> Sequence[FoodPriceView]: + admin1_is_unspecified, admin2_is_unspecified = compute_unspecified_values(admin_level) + + return await food_price_view_list( + pagination_parameters=pagination_parameters, + db=db, + market_code=market_code, + market_name=market_name, + commodity_code=commodity_code, + commodity_category=commodity_category, + commodity_name=commodity_name, + price_flag=price_flag, + price_type=price_type, + price_min=price_min, + price_max=price_max, + location_ref=location_ref, + location_code=location_code, + location_name=location_name, + admin1_ref=admin1_ref, + admin1_code=admin1_code, + admin1_name=admin1_name, + admin1_is_unspecified=admin1_is_unspecified, + admin2_ref=admin2_ref, + admin2_code=admin2_code, + admin2_name=admin2_name, + admin2_is_unspecified=admin2_is_unspecified, + ) diff --git a/hdx_hapi/services/food_security_logic.py b/hdx_hapi/services/food_security_logic.py index 6b03296a..3e8d2c3a 100644 --- a/hdx_hapi/services/food_security_logic.py +++ b/hdx_hapi/services/food_security_logic.py @@ -1,45 +1,44 @@ -from typing import Dict - from sqlalchemy.ext.asyncio import AsyncSession from hdx_hapi.db.dao.food_security_view_dao import food_security_view_list -from hdx_hapi.endpoints.util.util import AdminLevel +from hdx_hapi.endpoints.util.util import AdminLevel, CommonEndpointParams, ReferencePeriodParameters from hdx_hapi.services.admin_level_logic import compute_unspecified_values async def get_food_security_srv( - pagination_parameters: Dict, + ref_period_parameters: ReferencePeriodParameters, + pagination_parameters: CommonEndpointParams, db: AsyncSession, - ipc_phase_code: str = None, - ipc_type_code: str = None, - dataset_hdx_provider_stub: str = None, - resource_update_date_min=None, - resource_update_date_max=None, + ipc_phase: str = None, + ipc_type: str = None, location_code: str = None, location_name: str = None, admin1_name: str = None, admin1_code: str = None, + location_ref: int = None, + admin2_ref: int = None, admin2_code: str = None, admin2_name: str = None, + admin1_ref: int = None, admin_level: AdminLevel = None, ): admin1_is_unspecified, admin2_is_unspecified = compute_unspecified_values(admin_level) return await food_security_view_list( pagination_parameters=pagination_parameters, + ref_period_parameters=ref_period_parameters, db=db, - ipc_phase_code=ipc_phase_code, - ipc_type_code=ipc_type_code, - dataset_hdx_provider_stub=dataset_hdx_provider_stub, - resource_update_date_min=resource_update_date_min, - resource_update_date_max=resource_update_date_max, + ipc_phase=ipc_phase, + ipc_type=ipc_type, location_code=location_code, location_name=location_name, admin1_name=admin1_name, admin1_code=admin1_code, admin1_is_unspecified=admin1_is_unspecified, + location_ref=location_ref, + admin2_ref=admin2_ref, admin2_code=admin2_code, admin2_name=admin2_name, 
admin2_is_unspecified=admin2_is_unspecified, + admin1_ref=admin1_ref, ) - diff --git a/hdx_hapi/services/funding_logic.py b/hdx_hapi/services/funding_logic.py new file mode 100644 index 00000000..b0db1c9b --- /dev/null +++ b/hdx_hapi/services/funding_logic.py @@ -0,0 +1,34 @@ +from typing import Optional, Sequence +from sqlalchemy.ext.asyncio import AsyncSession + +from hdx_hapi.db.dao.funding_view_dao import funding_view_list +from hdx_hapi.db.models.views.all_views import FundingView +from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters + + +async def get_funding_srv( + pagination_parameters: PaginationParams, + ref_period_parameters: ReferencePeriodParameters, + db: AsyncSession, + appeal_code: Optional[str] = None, + appeal_type: Optional[str] = None, + org_acronym: Optional[str] = None, + org_name: Optional[str] = None, + sector_name: Optional[str] = None, + # location_ref: Optional[int] = None, + location_code: Optional[str] = None, + location_name: Optional[str] = None, +) -> Sequence[FundingView]: + + return await funding_view_list( + pagination_parameters=pagination_parameters, + ref_period_parameters=ref_period_parameters, + db=db, + appeal_code=appeal_code, + appeal_type=appeal_type, + org_acronym=org_acronym, + org_name=org_name, + sector_name=sector_name, + location_code=location_code, + location_name=location_name, + ) diff --git a/hdx_hapi/services/gender_logic.py b/hdx_hapi/services/gender_logic.py deleted file mode 100644 index eec908f5..00000000 --- a/hdx_hapi/services/gender_logic.py +++ /dev/null @@ -1,18 +0,0 @@ -from typing import Dict - -from sqlalchemy.ext.asyncio import AsyncSession - -from hdx_hapi.db.dao.gender_view_dao import genders_view_list - -async def get_genders_srv( - pagination_parameters: Dict, - db: AsyncSession, - code: str = None, - description: str = None -): - return await genders_view_list( - pagination_parameters=pagination_parameters, - db=db, - code=code, - description=description - ) diff --git a/hdx_hapi/services/hdx_url_logic.py b/hdx_hapi/services/hdx_url_logic.py index 041319c2..f2696a25 100644 --- a/hdx_hapi/services/hdx_url_logic.py +++ b/hdx_hapi/services/hdx_url_logic.py @@ -7,73 +7,76 @@ CONFIG = get_config() + def get_dataset_url(dataset_id: str) -> HttpUrl: """Creates the full HDX URL for a dataset - + Args: - context (Context): + context (Context): dataset_id (str): Dataset id or name Returns: str: HDX URL for the specified dataset - """ + """ domain = CONFIG.HDX_DOMAIN dataset_url = CONFIG.HDX_DATASET_URL if not domain: logger.warning('HDX_DOMAIN environment variable is not set.') - url=dataset_url.format(domain=domain, dataset_id=dataset_id) + url = dataset_url.format(domain=domain, dataset_id=dataset_id) return HttpUrl(url=url) + def get_dataset_api_url(dataset_id: str) -> HttpUrl: """Creates the full HDX API URL for a dataset - + Args: - context (Context): + context (Context): dataset_id (str): Dataset id or name Returns: str: HDX API URL for the specified dataset (package_show) - """ + """ domain = CONFIG.HDX_DOMAIN dataset_api_url = CONFIG.HDX_DATASET_API_URL if not domain: logger.warning('HDX_DOMAIN environment variable is not set.') - url=dataset_api_url.format(domain=domain, dataset_id=dataset_id) + url = dataset_api_url.format(domain=domain, dataset_id=dataset_id) return HttpUrl(url=url) def get_resource_url(dataset_id: str, resource_id: str) -> HttpUrl: """Creates the full HDX URL for a dataset - + Args: - context (Context): + context (Context): dataset_id (str): Dataset id or name 
Returns: str: HDX URL for the specified dataset - """ domain = CONFIG.HDX_DOMAIN resource_url = CONFIG.HDX_RESOURCE_URL if not domain: logger.warning('HDX_DOMAIN environment variable is not set.') - url=resource_url.format(domain=domain, dataset_id=dataset_id, resource_id=resource_id) + url = resource_url.format(domain=domain, dataset_id=dataset_id, resource_id=resource_id) return HttpUrl(url=url) + def get_resource_api_url(resource_id: str) -> HttpUrl: """Creates the full HDX API URL for a dataset - + Args: - context (Context): + context (Context): dataset_id (str): Dataset id or name Returns: str: HDX API URL for the specified dataset (package_show) - """ domain = CONFIG.HDX_DOMAIN resource_api_url = CONFIG.HDX_RESOURCE_API_URL if not domain: logger.warning('HDX_DOMAIN environment variable is not set.') - url=resource_api_url.format(domain=domain, resource_id=resource_id) + url = resource_api_url.format(domain=domain, resource_id=resource_id) return HttpUrl(url=url) @@ -81,16 +84,34 @@ def get_organization_url(org_id: str) -> HttpUrl: """Creates the full HDX URL for an organization Args: - context (Context): + context (Context): org_id (str): Organization id or name Returns: str: HDX URL for the specified organization - """ domain = CONFIG.HDX_DOMAIN organization_url = CONFIG.HDX_ORGANIZATION_URL if not domain: logger.warning('HDX_DOMAIN environment variable is not set.') - + url = organization_url.format(domain=domain, org_id=org_id) - return HttpUrl(url=url) \ No newline at end of file + return HttpUrl(url=url) + + +def get_organization_api_url(org_id: str) -> HttpUrl: + """Creates the full HDX API URL for an organization + + Args: + context (Context): + org_id (str): Organization id or name + Returns: + str: HDX API URL for the specified organization (organization_show) + """ + domain = CONFIG.HDX_DOMAIN + organization_api_url = CONFIG.HDX_ORGANIZATION_API_URL + if not domain: + logger.warning('HDX_DOMAIN environment variable is not set.') + + url = organization_api_url.format(domain=domain, org_id=org_id) + return HttpUrl(url=url) diff --git a/hdx_hapi/services/humanitarian_needs_logic.py b/hdx_hapi/services/humanitarian_needs_logic.py index f6263a5d..df46225e 100644 --- a/hdx_hapi/services/humanitarian_needs_logic.py +++ b/hdx_hapi/services/humanitarian_needs_logic.py @@ -1,57 +1,64 @@ -from typing import Dict - +# from datetime import datetime +from typing import Optional, Sequence from sqlalchemy.ext.asyncio import AsyncSession +from hdx_hapi.db.models.views.all_views import HumanitarianNeedsView from hdx_hapi.db.dao.humanitarian_needs_view_dao import humanitarian_needs_view_list -from hdx_hapi.endpoints.util.util import AdminLevel from hdx_hapi.services.admin_level_logic import compute_unspecified_values +from hdx_hapi.endpoints.util.util import AdminLevel, PaginationParams, ReferencePeriodParameters +from hapi_schema.utils.enums import DisabledMarker, Gender, PopulationGroup, PopulationStatus async def get_humanitarian_needs_srv( - pagination_parameters: Dict, + pagination_parameters: PaginationParams, + ref_period_parameters: ReferencePeriodParameters, db: AsyncSession, - gender_code: str = None, - age_range_code: str = None, - disabled_marker: bool = None, - sector_code: str = None, - sector_name: str = None, - population_group_code: str = None, - population_status_code: str = None, - population: int = None, - dataset_hdx_provider_stub: str = None, - resource_update_date_min=None, - resource_update_date_max=None, - location_code: str = None, - location_name: str =
None, - admin1_code: str = None, - # admin1_name: str = None, - admin2_code: str = None, - admin2_name: str = None, - admin_level: AdminLevel = None, -): + admin2_ref: Optional[int] = None, + gender: Optional[Gender] = None, + age_range: Optional[str] = None, + disabled_marker: Optional[DisabledMarker] = None, + sector_code: Optional[str] = None, + population_group: Optional[PopulationGroup] = None, + population_status: Optional[PopulationStatus] = None, + population_min: Optional[int] = None, + population_max: Optional[int] = None, + sector_name: Optional[str] = None, + location_code: Optional[str] = None, + location_name: Optional[str] = None, + location_ref: Optional[int] = None, + admin1_code: Optional[str] = None, + admin2_code: Optional[str] = None, + admin2_name: Optional[str] = None, + admin1_ref: Optional[int] = None, + admin1_name: Optional[str] = None, + admin1_is_unspecified: Optional[bool] = None, + admin2_is_unspecified: Optional[bool] = None, + admin_level: Optional[AdminLevel] = None, +) -> Sequence[HumanitarianNeedsView]: admin1_is_unspecified, admin2_is_unspecified = compute_unspecified_values(admin_level) return await humanitarian_needs_view_list( pagination_parameters=pagination_parameters, + ref_period_parameters=ref_period_parameters, db=db, - gender_code=gender_code, - age_range_code=age_range_code, + admin2_ref=admin2_ref, + gender=gender, + age_range=age_range, disabled_marker=disabled_marker, sector_code=sector_code, + population_group=population_group, + population_status=population_status, + population_min=population_min, + population_max=population_max, sector_name=sector_name, - population_group_code=population_group_code, - population_status_code=population_status_code, - population=population, - dataset_hdx_provider_stub=dataset_hdx_provider_stub, - resource_update_date_min=resource_update_date_min, - resource_update_date_max=resource_update_date_max, location_code=location_code, location_name=location_name, + location_ref=location_ref, admin1_code=admin1_code, - # admin1_name=admin1_name, - admin1_is_unspecified=admin1_is_unspecified, admin2_code=admin2_code, admin2_name=admin2_name, + admin1_ref=admin1_ref, + admin1_name=admin1_name, + admin1_is_unspecified=admin1_is_unspecified, admin2_is_unspecified=admin2_is_unspecified, ) - diff --git a/hdx_hapi/services/location_logic.py b/hdx_hapi/services/location_logic.py index 8d0cee70..407dcf9f 100644 --- a/hdx_hapi/services/location_logic.py +++ b/hdx_hapi/services/location_logic.py @@ -1,18 +1,19 @@ -from typing import Dict - from sqlalchemy.ext.asyncio import AsyncSession from hdx_hapi.db.dao.location_view_dao import locations_view_list +from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters async def get_locations_srv( - pagination_parameters: Dict, + pagination_parameters: PaginationParams, + ref_period_parameters: ReferencePeriodParameters, db: AsyncSession, code: str = None, name: str = None, ): return await locations_view_list( pagination_parameters=pagination_parameters, + ref_period_parameters=ref_period_parameters, db=db, code=code, name=name, diff --git a/hdx_hapi/services/national_risk_logic.py b/hdx_hapi/services/national_risk_logic.py index 45661730..5f696dc7 100644 --- a/hdx_hapi/services/national_risk_logic.py +++ b/hdx_hapi/services/national_risk_logic.py @@ -1,41 +1,45 @@ -from typing import Dict +from typing import Optional +from hapi_schema.utils.enums import RiskClass from sqlalchemy.ext.asyncio import AsyncSession from hdx_hapi.db.dao.national_risk_view_dao 
import national_risks_view_list +from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters async def get_national_risks_srv( - pagination_parameters: Dict, + pagination_parameters: PaginationParams, + ref_period_parameters: ReferencePeriodParameters, db: AsyncSession, - risk_class: int = None, - global_rank: int = None, - overall_risk: float = None, - hazard_exposure_risk: float = None, - vulnerability_risk: float = None, - coping_capacity_risk: float = None, - dataset_hdx_provider_stub: str = None, - resource_update_date_min=None, - resource_update_date_max=None, - # sector_name: str = None, - location_code: str = None, - location_name: str = None, + risk_class: Optional[RiskClass] = None, + global_rank_min: Optional[int] = None, + global_rank_max: Optional[int] = None, + overall_risk_min: Optional[float] = None, + overall_risk_max: Optional[float] = None, + hazard_exposure_risk_min: Optional[float] = None, + hazard_exposure_risk_max: Optional[float] = None, + vulnerability_risk_min: Optional[float] = None, + vulnerability_risk_max: Optional[float] = None, + coping_capacity_risk_min: Optional[float] = None, + coping_capacity_risk_max: Optional[float] = None, + location_code: Optional[str] = None, + location_name: Optional[str] = None, ): - return await national_risks_view_list( pagination_parameters=pagination_parameters, + ref_period_parameters=ref_period_parameters, db=db, risk_class=risk_class, - global_rank=global_rank, - overall_risk=overall_risk, - hazard_exposure_risk=hazard_exposure_risk, - vulnerability_risk=vulnerability_risk, - coping_capacity_risk=coping_capacity_risk, - dataset_hdx_provider_stub=dataset_hdx_provider_stub, - resource_update_date_min=resource_update_date_min, - resource_update_date_max=resource_update_date_max, - # sector_name=sector_name, + global_rank_min=global_rank_min, + global_rank_max=global_rank_max, + overall_risk_min=overall_risk_min, + overall_risk_max=overall_risk_max, + hazard_exposure_risk_min=hazard_exposure_risk_min, + hazard_exposure_risk_max=hazard_exposure_risk_max, + vulnerability_risk_min=vulnerability_risk_min, + vulnerability_risk_max=vulnerability_risk_max, + coping_capacity_risk_min=coping_capacity_risk_min, + coping_capacity_risk_max=coping_capacity_risk_max, location_code=location_code, location_name=location_name, ) - diff --git a/hdx_hapi/services/operational_presence_logic.py b/hdx_hapi/services/operational_presence_logic.py index 7c3a6b11..dfba98dc 100644 --- a/hdx_hapi/services/operational_presence_logic.py +++ b/hdx_hapi/services/operational_presence_logic.py @@ -1,51 +1,49 @@ -from typing import Dict - +from typing import Optional from sqlalchemy.ext.asyncio import AsyncSession from hdx_hapi.db.dao.operational_presence_view_dao import operational_presences_view_list -from hdx_hapi.endpoints.util.util import AdminLevel +from hdx_hapi.endpoints.util.util import AdminLevel, PaginationParams, ReferencePeriodParameters from hdx_hapi.services.admin_level_logic import compute_unspecified_values async def get_operational_presences_srv( - pagination_parameters: Dict, + pagination_parameters: PaginationParams, + ref_period_parameters: ReferencePeriodParameters, db: AsyncSession, - sector_code: str = None, - dataset_hdx_provider_stub: str = None, - resource_update_date_min=None, - resource_update_date_max=None, - org_acronym: str = None, - org_name: str = None, - sector_name: str = None, - location_code: str = None, - location_name: str = None, - admin1_code: str = None, - admin1_name: str = None, - # 
admin1_is_unspecified=None, - admin2_code: str = None, - admin2_name: str = None, - admin_level: AdminLevel = None, - # admin2_is_unspecified=None, + sector_code: Optional[str] = None, + org_acronym: Optional[str] = None, + org_name: Optional[str] = None, + sector_name: Optional[str] = None, + location_ref: Optional[int] = None, + location_code: Optional[str] = None, + location_name: Optional[str] = None, + admin1_ref: Optional[int] = None, + admin1_code: Optional[str] = None, + admin1_name: Optional[str] = None, + admin2_ref: Optional[int] = None, + admin2_code: Optional[str] = None, + admin2_name: Optional[str] = None, + admin_level: Optional[AdminLevel] = None, ): - admin1_is_unspecified, admin2_is_unspecified = compute_unspecified_values(admin_level) return await operational_presences_view_list( pagination_parameters=pagination_parameters, + ref_period_parameters=ref_period_parameters, db=db, - sector_code=sector_code, - dataset_hdx_provider_stub=dataset_hdx_provider_stub, - resource_update_date_min=resource_update_date_min, - resource_update_date_max=resource_update_date_max, + sector_code=sector_code, org_acronym=org_acronym, org_name=org_name, - sector_name=sector_name, + sector_name=sector_name, + location_ref=location_ref, location_code=location_code, location_name=location_name, - admin1_code=admin1_code, + admin1_ref=admin1_ref, + admin1_code=admin1_code, admin1_name=admin1_name, admin1_is_unspecified=admin1_is_unspecified, - admin2_code=admin2_code, + admin2_ref=admin2_ref, + admin2_code=admin2_code, admin2_name=admin2_name, - admin2_is_unspecified=admin2_is_unspecified + admin2_is_unspecified=admin2_is_unspecified, ) diff --git a/hdx_hapi/services/org_logic.py b/hdx_hapi/services/org_logic.py index fd1b8194..48ce4f8f 100644 --- a/hdx_hapi/services/org_logic.py +++ b/hdx_hapi/services/org_logic.py @@ -1,12 +1,11 @@ -from typing import Dict - from sqlalchemy.ext.asyncio import AsyncSession from hdx_hapi.db.dao.org_view_dao import orgs_view_list +from hdx_hapi.endpoints.util.util import PaginationParams async def get_orgs_srv( - pagination_parameters: Dict, + pagination_parameters: PaginationParams, db: AsyncSession, acronym: str = None, name: str = None, diff --git a/hdx_hapi/services/org_type_logic.py b/hdx_hapi/services/org_type_logic.py index 1520dc94..942db7b3 100644 --- a/hdx_hapi/services/org_type_logic.py +++ b/hdx_hapi/services/org_type_logic.py @@ -1,13 +1,12 @@ -from typing import Dict - from sqlalchemy.ext.asyncio import AsyncSession from hdx_hapi.db.dao.org_type_view_dao import org_types_view_list +from hdx_hapi.endpoints.util.util import PaginationParams async def get_org_types_srv( - pagination_parameters: Dict, + pagination_parameters: PaginationParams, db: AsyncSession, code: str = None, description: str = None diff --git a/hdx_hapi/services/population_group_logic.py b/hdx_hapi/services/population_group_logic.py deleted file mode 100644 index 742cd8b5..00000000 --- a/hdx_hapi/services/population_group_logic.py +++ /dev/null @@ -1,19 +0,0 @@ -from typing import Dict - -from sqlalchemy.ext.asyncio import AsyncSession - -from hdx_hapi.db.dao.population_group_view_dao import population_groups_view_list - - -async def get_population_groups_srv( - pagination_parameters: Dict, - db: AsyncSession, - code: str = None, - description: str = None, -): - return await population_groups_view_list( - pagination_parameters=pagination_parameters, - db=db, - code=code, - description=description, - ) diff --git a/hdx_hapi/services/population_logic.py 
b/hdx_hapi/services/population_logic.py index 8e685a23..188a6b0e 100644 --- a/hdx_hapi/services/population_logic.py +++ b/hdx_hapi/services/population_logic.py @@ -1,26 +1,28 @@ -from typing import Dict - from sqlalchemy.ext.asyncio import AsyncSession +from hapi_schema.utils.enums import Gender + from hdx_hapi.db.dao.population_view_dao import populations_view_list -from hdx_hapi.endpoints.util.util import AdminLevel +from hdx_hapi.endpoints.util.util import AdminLevel, CommonEndpointParams, ReferencePeriodParameters from hdx_hapi.services.admin_level_logic import compute_unspecified_values async def get_populations_srv( - pagination_parameters: Dict, + ref_period_parameters: ReferencePeriodParameters, + pagination_parameters: CommonEndpointParams, db: AsyncSession, - gender_code: str = None, - age_range_code: str = None, - population: int = None, - dataset_hdx_provider_stub: str = None, - resource_update_date_min=None, - resource_update_date_max=None, + gender: Gender = None, + age_range: str = None, + population_min: int = None, + population_max: int = None, + location_ref: int = None, location_code: str = None, location_name: str = None, + admin1_ref: int = None, admin1_name: str = None, admin1_code: str = None, # admin1_is_unspecified: bool = None, + admin2_ref: int = None, admin2_code: str = None, admin2_name: str = None, admin_level: AdminLevel = None, @@ -30,20 +32,21 @@ async def get_populations_srv( return await populations_view_list( pagination_parameters=pagination_parameters, + ref_period_parameters=ref_period_parameters, db=db, - gender_code=gender_code, - age_range_code=age_range_code, - population=population, - dataset_hdx_provider_stub=dataset_hdx_provider_stub, - resource_update_date_min=resource_update_date_min, - resource_update_date_max=resource_update_date_max, + gender=gender, + age_range=age_range, + population_min=population_min, + population_max=population_max, + admin1_ref=admin1_ref, + location_ref=location_ref, location_code=location_code, location_name=location_name, admin1_name=admin1_name, admin1_code=admin1_code, - admin1_is_unspecified=admin1_is_unspecified, - admin2_code=admin2_code, + admin2_ref=admin2_ref, admin2_name=admin2_name, + admin2_code=admin2_code, + admin1_is_unspecified=admin1_is_unspecified, admin2_is_unspecified=admin2_is_unspecified, ) - diff --git a/hdx_hapi/services/population_status_logic.py b/hdx_hapi/services/population_status_logic.py deleted file mode 100644 index 91bb6ffa..00000000 --- a/hdx_hapi/services/population_status_logic.py +++ /dev/null @@ -1,19 +0,0 @@ -from typing import Dict - -from sqlalchemy.ext.asyncio import AsyncSession - -from hdx_hapi.db.dao.population_status_view_dao import population_statuses_view_list - - -async def get_population_statuses_srv( - pagination_parameters: Dict, - db: AsyncSession, - code: str = None, - description: str = None, -): - return await population_statuses_view_list( - pagination_parameters=pagination_parameters, - db=db, - code=code, - description=description, - ) diff --git a/hdx_hapi/services/poverty_rate_logic.py b/hdx_hapi/services/poverty_rate_logic.py new file mode 100644 index 00000000..917e3faf --- /dev/null +++ b/hdx_hapi/services/poverty_rate_logic.py @@ -0,0 +1,28 @@ +from typing import Optional, Sequence +from sqlalchemy.ext.asyncio import AsyncSession + +from hdx_hapi.db.models.views.all_views import PovertyRateView +from hdx_hapi.db.dao.poverty_rate_dao import poverty_rates_view_list +from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters + + 
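The `*_min`/`*_max` pairs introduced across these services (`population_min`/`population_max` above, `mpi_min`/`mpi_max` in the poverty rate service that follows) replace the old exact-match filters with range filters. A sketch of how a DAO list function might apply such optional bounds; `PovertyRateView` and its `mpi` column are taken from this changeset, while the query construction is illustrative rather than the actual DAO code:

```python
from typing import Optional

from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from hdx_hapi.db.models.views.all_views import PovertyRateView


async def filter_by_mpi_range(
    db: AsyncSession,
    mpi_min: Optional[float] = None,
    mpi_max: Optional[float] = None,
):
    # Each bound is applied only when supplied, so None means "no constraint"
    # and omitting both bounds returns the unfiltered view.
    query = select(PovertyRateView)
    if mpi_min is not None:
        query = query.where(PovertyRateView.mpi >= mpi_min)
    if mpi_max is not None:
        query = query.where(PovertyRateView.mpi <= mpi_max)
    result = await db.execute(query)
    return result.scalars().all()
```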
+async def get_poverty_rates_srv( + pagination_parameters: PaginationParams, + ref_period_parameters: ReferencePeriodParameters, + db: AsyncSession, + mpi_min: Optional[float] = None, + mpi_max: Optional[float] = None, + location_code: Optional[str] = None, + location_name: Optional[str] = None, + admin1_name: Optional[str] = None, +) -> Sequence[PovertyRateView]: + return await poverty_rates_view_list( + pagination_parameters=pagination_parameters, + ref_period_parameters=ref_period_parameters, + db=db, + mpi_min=mpi_min, + mpi_max=mpi_max, + location_code=location_code, + location_name=location_name, + admin1_name=admin1_name, + ) diff --git a/hdx_hapi/services/refugees_logic.py b/hdx_hapi/services/refugees_logic.py new file mode 100644 index 00000000..2bb53934 --- /dev/null +++ b/hdx_hapi/services/refugees_logic.py @@ -0,0 +1,37 @@ +from typing import Optional, Sequence +from sqlalchemy.ext.asyncio import AsyncSession + +from hdx_hapi.db.models.views.all_views import RefugeesView +from hdx_hapi.db.dao.refugees_view_dao import refugees_view_list +from hdx_hapi.endpoints.util.util import PaginationParams, ReferencePeriodParameters +from hapi_schema.utils.enums import Gender, PopulationGroup + + +async def get_refugees_srv( + pagination_parameters: PaginationParams, + ref_period_parameters: ReferencePeriodParameters, + db: AsyncSession, + population_group: Optional[PopulationGroup] = None, + population_min: Optional[int] = None, + population_max: Optional[int] = None, + gender: Optional[Gender] = None, + age_range: Optional[str] = None, + origin_location_code: Optional[str] = None, + origin_location_name: Optional[str] = None, + asylum_location_code: Optional[str] = None, + asylum_location_name: Optional[str] = None, +) -> Sequence[RefugeesView]: + return await refugees_view_list( + pagination_parameters=pagination_parameters, + ref_period_parameters=ref_period_parameters, + db=db, + population_group=population_group, + population_min=population_min, + population_max=population_max, + gender=gender, + age_range=age_range, + origin_location_code=origin_location_code, + origin_location_name=origin_location_name, + asylum_location_code=asylum_location_code, + asylum_location_name=asylum_location_name, + ) diff --git a/hdx_hapi/services/resource_logic.py b/hdx_hapi/services/resource_logic.py index 5c6895dd..00a11964 100644 --- a/hdx_hapi/services/resource_logic.py +++ b/hdx_hapi/services/resource_logic.py @@ -1,19 +1,21 @@ -from typing import Dict from datetime import datetime from sqlalchemy.ext.asyncio import AsyncSession from hdx_hapi.db.dao.resource_view_dao import resources_view_list +from hdx_hapi.endpoints.util.util import PaginationParams async def get_resources_srv( - pagination_parameters: Dict, + pagination_parameters: PaginationParams, db: AsyncSession, - hdx_id: str = None, + resource_hdx_id: str = None, format: str = None, update_date_min: datetime = None, update_date_max: datetime = None, is_hxl: bool = None, - dataset_title: str = None, + hapi_updated_date_min: datetime = None, + hapi_updated_date_max: datetime = None, + dataset_hdx_title: str = None, dataset_hdx_id: str = None, dataset_hdx_stub: str = None, dataset_hdx_provider_stub: str = None, @@ -22,14 +24,16 @@ async def get_resources_srv( return await resources_view_list( pagination_parameters=pagination_parameters, db=db, - hdx_id=hdx_id, + resource_hdx_id=resource_hdx_id, format=format, update_date_min=update_date_min, update_date_max=update_date_max, is_hxl=is_hxl, + hapi_updated_date_min=hapi_updated_date_min, + 
hapi_updated_date_max=hapi_updated_date_max, dataset_hdx_id=dataset_hdx_id, dataset_hdx_stub=dataset_hdx_stub, - dataset_title=dataset_title, + dataset_hdx_title=dataset_hdx_title, dataset_hdx_provider_stub=dataset_hdx_provider_stub, dataset_hdx_provider_name=dataset_hdx_provider_name, ) diff --git a/hdx_hapi/services/sector_logic.py b/hdx_hapi/services/sector_logic.py index dc4efa51..4bce85d1 100644 --- a/hdx_hapi/services/sector_logic.py +++ b/hdx_hapi/services/sector_logic.py @@ -1,12 +1,11 @@ -from typing import Dict - from sqlalchemy.ext.asyncio import AsyncSession from hdx_hapi.db.dao.sector_view_dao import sectors_view_list +from hdx_hapi.endpoints.util.util import PaginationParams async def get_sectors_srv( - pagination_parameters: Dict, + pagination_parameters: PaginationParams, db: AsyncSession, code: str = None, name: str = None, diff --git a/hdx_hapi/services/wfp_commodity_logic.py b/hdx_hapi/services/wfp_commodity_logic.py new file mode 100644 index 00000000..5a645854 --- /dev/null +++ b/hdx_hapi/services/wfp_commodity_logic.py @@ -0,0 +1,23 @@ +from typing import Optional, Sequence +from hapi_schema.utils.enums import CommodityCategory +from sqlalchemy.ext.asyncio import AsyncSession + +from hdx_hapi.db.dao.wfp_commodity_view_dao import wfp_commodity_view_list +from hdx_hapi.db.models.views.all_views import WfpCommodityView +from hdx_hapi.endpoints.util.util import PaginationParams + + +async def get_wfp_commodities_srv( + pagination_parameters: PaginationParams, + db: AsyncSession, + code: Optional[str] = None, + category: Optional[CommodityCategory] = None, + name: Optional[str] = None +) -> Sequence[WfpCommodityView]: + return await wfp_commodity_view_list( + pagination_parameters=pagination_parameters, + db=db, + code=code, + category=category, + name=name, + ) diff --git a/hdx_hapi/services/wfp_market_logic.py b/hdx_hapi/services/wfp_market_logic.py new file mode 100644 index 00000000..7485c2cb --- /dev/null +++ b/hdx_hapi/services/wfp_market_logic.py @@ -0,0 +1,45 @@ +from typing import Optional, Sequence +from sqlalchemy.ext.asyncio import AsyncSession + +from hdx_hapi.db.dao.wfp_market_view_dao import wfp_market_view_list +from hdx_hapi.db.models.views.all_views import WfpMarketView +from hdx_hapi.endpoints.util.util import AdminLevel, PaginationParams +from hdx_hapi.services.admin_level_logic import compute_unspecified_values + + +async def get_wfp_markets_srv( + pagination_parameters: PaginationParams, + db: AsyncSession, + code: Optional[str] = None, + name: Optional[str] = None, + location_code: Optional[str] = None, + location_name: Optional[str] = None, + admin1_ref: Optional[int] = None, + admin1_code: Optional[str] = None, + admin1_name: Optional[str] = None, + location_ref: Optional[int] = None, + admin2_ref: Optional[int] = None, + admin2_code: Optional[str] = None, + admin2_name: Optional[str] = None, + admin_level: Optional[AdminLevel] = None, +) -> Sequence[WfpMarketView]: + + admin1_is_unspecified, admin2_is_unspecified = compute_unspecified_values(admin_level) + + return await wfp_market_view_list( + pagination_parameters=pagination_parameters, + db=db, + code=code, + name=name, + location_ref=location_ref, + location_code=location_code, + location_name=location_name, + admin1_ref=admin1_ref, + admin1_code=admin1_code, + admin1_name=admin1_name, + admin1_is_unspecified=admin1_is_unspecified, + admin2_ref=admin2_ref, + admin2_code=admin2_code, + admin2_name=admin2_name, + admin2_is_unspecified=admin2_is_unspecified, + ) diff --git a/initialize_db.sh 
b/initialize_db.sh index 808312e7..a1a3fc54 100755 --- a/initialize_db.sh +++ b/initialize_db.sh @@ -4,7 +4,7 @@ DB_USER="${HAPI_DB_USER:-hapi}" DB_PASS="${HAPI_DB_PASSWORD:-hapi}" DB_HOST="${HAPI_DB_HOST:-db}" DB_PORT="${HAPI_DB_PORT:-5432}" -ALEMBIC_COMMIT="${ALEMBIC_COMMIT:-5ea41}" +ALEMBIC_COMMIT="${ALEMBIC_COMMIT:-head}" cd docker docker-compose exec -T db psql -U postgres -c "create database $DB_NAME with encoding 'UTF8';" @@ -14,6 +14,6 @@ docker-compose exec -T db psql -U postgres -c "grant all privileges on database docker-compose exec -T db psql -U postgres $DB_NAME -c "GRANT USAGE, CREATE ON SCHEMA public TO $DB_USER;" docker-compose exec -T db psql -U postgres $DB_NAME -c "GRANT ALL ON ALL TABLES IN SCHEMA public TO $DB_USER;" -docker-compose exec -T hapi sh -c "alembic -x sqlalchemy.url=postgresql+psycopg2://$DB_USER:$DB_PASS@$DB_HOST:$DB_PORT/$DB_NAME upgrade $ALEMBIC_COMMIT" +# docker-compose exec -T hapi sh -c "alembic -x sqlalchemy.url=postgresql+psycopg2://$DB_USER:$DB_PASS@$DB_HOST:$DB_PORT/$DB_NAME upgrade $ALEMBIC_COMMIT" cd .. \ No newline at end of file diff --git a/main.py b/main.py index aa3fd924..e47e0a2f 100644 --- a/main.py +++ b/main.py @@ -5,51 +5,105 @@ import uvicorn # noqa from fastapi import FastAPI, Request # noqa +from fastapi.exceptions import ResponseValidationError # noqa from fastapi.responses import HTMLResponse, RedirectResponse # noqa from fastapi.openapi.docs import get_swagger_ui_html # noqa # from hdx_hapi.services.sql_alchemy_session import init_db +from hdx_hapi.endpoints.exception_handler.response_validation_error_handler import response_validation_error_handler # noqa +from hdx_hapi.endpoints.middleware.app_identifier_middleware import app_identifier_middleware # noqa +from hdx_hapi.endpoints.middleware.mixpanel_tracking_middleware import mixpanel_tracking_middleware # noqa from hdx_hapi.endpoints.get_encoded_identifier import router as encoded_identifier_router # noqa from hdx_hapi.endpoints.favicon import router as favicon_router # noqa + from hdx_hapi.endpoints.get_population import router as population_router # noqa from hdx_hapi.endpoints.get_operational_presence import router as operational_presence_router # noqa +from hdx_hapi.endpoints.get_funding import router as funding_router # noqa +from hdx_hapi.endpoints.get_conflict_events import router as conflict_events_router # noqa from hdx_hapi.endpoints.get_admin_level import router as admin_level_router # noqa from hdx_hapi.endpoints.get_hdx_metadata import router as dataset_router # noqa from hdx_hapi.endpoints.get_humanitarian_response import router as humanitarian_response_router # noqa -from hdx_hapi.endpoints.get_demographic import router as demographic_router # noqa -from hdx_hapi.endpoints.get_food_security import router as food_security_router # noqa +from hdx_hapi.endpoints.get_affected_people import router as affected_people_router # noqa from hdx_hapi.endpoints.get_national_risk import router as national_risk_router # noqa -from hdx_hapi.endpoints.get_humanitarian_needs import router as humanitarian_needs_router # noqa -from hdx_hapi.endpoints.get_population_profile import router as population_profile_router # noqa +from hdx_hapi.endpoints.get_wfp_commodity import router as wfp_commodity_router # noqa +from hdx_hapi.endpoints.get_wfp_market import router as wfp_market_router # noqa +from hdx_hapi.endpoints.get_currency import router as currency_router # noqa +from hdx_hapi.endpoints.get_food_security import router as food_security_router # noqa +from 
hdx_hapi.endpoints.get_food_price import router as food_price_router # noqa + +from hdx_hapi.endpoints.get_version import router as version_router # noqa + +from hdx_hapi.endpoints.util.version import api_version # noqa # from hdx_hapi.endpoints.delete_example import delete_dataset +from hdx_hapi.config.config import get_config # noqa logger = logging.getLogger(__name__) -# import os -# logger.warning("Current folder is "+ os.getcwd()) + +CONFIG = get_config() +DESCRIPTION = """ +The Humanitarian API (HDX HAPI) is a service of the +Humanitarian Data Exchange (HDX), part of UNOCHA\'s +Centre for Humanitarian Data. +\nThis is the reference documentation of the API. +You may want to get started here + +All queries require an `app_identifier` which can be supplied as a query parameter or as a header +(`X-HDX-HAPI-APP-IDENTIFIER`). The `app_identifier` is simply a base64-encoded version of a user-supplied +application name and email address. + +The `limit` and `offset` parameters are available for all queries and have the usual database meanings +to provide pagination of results. If no `limit` is specified, a maximum of 10,000 records will be returned. + +The `output_format` parameter is available for all queries and can be set to JSON or CSV; +when JSON is selected, rows of data are supplied under a `data` key. + +Query parameters that access string fields are implicitly wildcards and case-insensitive, +so that `location_name=Mali` will return data for both Mali and Somalia ("Somalia" contains the substring "mali"). +""" # noqa app = FastAPI( - title='HAPI', - description='The Humanitarian API (HAPI) is a service of the Humanitarian Data Exchange (HDX), part of UNOCHA\'s Centre for Humanitarian Data.\nThis is the reference documentation of the API. You may want to get started here', # noqa - version='0.1.0', + title='HDX HAPI', + description=DESCRIPTION, + version=api_version, docs_url=None, + servers=[{'url': CONFIG.HAPI_SERVER_URL}] if CONFIG.HAPI_SERVER_URL else [], ) app.include_router(encoded_identifier_router) app.include_router(favicon_router) app.include_router(operational_presence_router) +app.include_router(funding_router) +app.include_router(conflict_events_router) app.include_router(population_router) -app.include_router(food_security_router) +app.include_router(affected_people_router) app.include_router(national_risk_router) -app.include_router(humanitarian_needs_router) +app.include_router(food_security_router) +app.include_router(food_price_router) app.include_router(admin_level_router) app.include_router(humanitarian_response_router) -app.include_router(demographic_router) -app.include_router(population_profile_router) app.include_router(dataset_router) +app.include_router(wfp_market_router) +app.include_router(wfp_commodity_router) +app.include_router(currency_router) +app.include_router(version_router) + + +# add middleware +@app.middleware('http') +async def app_identifier_middleware_init(request: Request, call_next): + response = await app_identifier_middleware(request, call_next) + return response + + +# add middleware +@app.middleware('http') +async def mixpanel_tracking_middleware_init(request: Request, call_next): + response = await mixpanel_tracking_middleware(request, call_next) + return response @app.on_event('startup') @@ -82,5 +136,10 @@ def home(): return RedirectResponse('/docs') +@app.exception_handler(ResponseValidationError) +async def resp_validation_exception_handler(request: Request, exc: ResponseValidationError): + return await response_validation_error_handler(request, exc) + + if __name__ == '__main__':
uvicorn.run(app, host='0.0.0.0', port=8844, log_config='logging.conf') diff --git a/mkdocs.yml b/mkdocs.yml index eda4203e..449ee77c 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -1,10 +1,11 @@ -site_name: HAPI - The Humanitarian API +site_name: HDX HAPI - The Humanitarian API nav: - Home: index.md - Getting Started: getting-started.md - Data Coverage: data.md - - Examples: examples.md + - Code Examples: examples.md + - Data Usage Guide: subcategory_details.md - Geo Data: geo.md - Contact: contact.md theme: - name: "mkdocs" \ No newline at end of file + name: "material" \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 70440a3d..afb0ac4f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,6 +12,7 @@ select = [ "E", "F", "Q", + "INP001", # Checks for packages that are missing an __init__.py file. ] extend-ignore = [ "Q003", # avoidable-escaped-quote diff --git a/requirements.txt b/requirements.txt index 695e2ec9..ed232eff 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,8 +4,11 @@ sqlalchemy[asyncio]~=2.0.20 asyncpg~=0.28.0 python-json-logger~=2.0.7 email_validator~=2.1.1 +mixpanel==4.10.1 +ua-parser==0.18.0 alembic~=1.12.00 psycopg2~=2.9.7 --e git+https://github.com/OCHA-DAP/hapi-sqlalchemy-schema@v0.6.1#egg=hapi-schema +-e git+https://github.com/OCHA-DAP/hapi-sqlalchemy-schema@v0.8.9#egg=hapi-schema + diff --git a/tests/conftest.py b/tests/conftest.py index 6454d646..a82a6cf5 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,16 +5,115 @@ from logging import Logger -from sqlalchemy import create_engine, text +from sqlalchemy import Engine, MetaData, create_engine, inspect, text from sqlalchemy.orm import sessionmaker, Session from typing import List - -SAMPLE_DATA_SQL_FILE = 'alembic/versions/afd54d1a867e_insert_sample_data.sql' +from hapi_schema.db_admin1 import view_params_admin1 +from hapi_schema.db_admin2 import view_params_admin2 +from hapi_schema.db_dataset import view_params_dataset +from hapi_schema.db_food_security import view_params_food_security +from hapi_schema.db_funding import view_params_funding +from hapi_schema.db_humanitarian_needs import view_params_humanitarian_needs +from hapi_schema.db_location import view_params_location +from hapi_schema.db_national_risk import view_params_national_risk +from hapi_schema.db_operational_presence import view_params_operational_presence +from hapi_schema.db_org_type import view_params_org_type +from hapi_schema.db_org import view_params_org +from hapi_schema.db_population import view_params_population +from hapi_schema.db_refugees import view_params_refugees +from hapi_schema.db_resource import view_params_resource +from hapi_schema.db_sector import view_params_sector +from hapi_schema.db_conflict_event import view_params_conflict_event +from hapi_schema.db_poverty_rate import view_params_poverty_rate +from hapi_schema.db_wfp_commodity import view_params_wfp_commodity +from hapi_schema.db_wfp_market import view_params_wfp_market +from hapi_schema.db_currency import view_params_currency +from hapi_schema.db_food_price import view_params_food_price + +from hdx_hapi.config.config import get_config +from hdx_hapi.db.models.base import Base +from hdx_hapi.db.models.views.util.util import CreateView + +SAMPLE_DATA_SQL_FILES = [ + 'tests/sample_data/location_admin.sql', + 'tests/sample_data/sector.sql', + 'tests/sample_data/org_type.sql', + 'tests/sample_data/org.sql', + 'tests/sample_data/dataset_resource.sql', + 'tests/sample_data/population.sql', + 
diff --git a/tests/conftest.py b/tests/conftest.py
index 6454d646..a82a6cf5 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -5,16 +5,115 @@ from logging import Logger

-from sqlalchemy import create_engine, text
+from sqlalchemy import Engine, MetaData, create_engine, inspect, text
 from sqlalchemy.orm import sessionmaker, Session
 from typing import List
-
-SAMPLE_DATA_SQL_FILE = 'alembic/versions/afd54d1a867e_insert_sample_data.sql'
+from hapi_schema.db_admin1 import view_params_admin1
+from hapi_schema.db_admin2 import view_params_admin2
+from hapi_schema.db_dataset import view_params_dataset
+from hapi_schema.db_food_security import view_params_food_security
+from hapi_schema.db_funding import view_params_funding
+from hapi_schema.db_humanitarian_needs import view_params_humanitarian_needs
+from hapi_schema.db_location import view_params_location
+from hapi_schema.db_national_risk import view_params_national_risk
+from hapi_schema.db_operational_presence import view_params_operational_presence
+from hapi_schema.db_org_type import view_params_org_type
+from hapi_schema.db_org import view_params_org
+from hapi_schema.db_population import view_params_population
+from hapi_schema.db_refugees import view_params_refugees
+from hapi_schema.db_resource import view_params_resource
+from hapi_schema.db_sector import view_params_sector
+from hapi_schema.db_conflict_event import view_params_conflict_event
+from hapi_schema.db_poverty_rate import view_params_poverty_rate
+from hapi_schema.db_wfp_commodity import view_params_wfp_commodity
+from hapi_schema.db_wfp_market import view_params_wfp_market
+from hapi_schema.db_currency import view_params_currency
+from hapi_schema.db_food_price import view_params_food_price
+
+from hdx_hapi.config.config import get_config
+from hdx_hapi.db.models.base import Base
+from hdx_hapi.db.models.views.util.util import CreateView
+
+SAMPLE_DATA_SQL_FILES = [
+    'tests/sample_data/location_admin.sql',
+    'tests/sample_data/sector.sql',
+    'tests/sample_data/org_type.sql',
+    'tests/sample_data/org.sql',
+    'tests/sample_data/dataset_resource.sql',
+    'tests/sample_data/population.sql',
+    'tests/sample_data/operational_presence.sql',
+    'tests/sample_data/funding.sql',
+    'tests/sample_data/conflict_event.sql',
+    'tests/sample_data/national_risk.sql',
+    'tests/sample_data/humanitarian_needs.sql',
+    'tests/sample_data/refugees.sql',
+    'tests/sample_data/poverty_rate.sql',
+    'tests/sample_data/food_security.sql',
+    'tests/sample_data/wfp_commodity.sql',
+    'tests/sample_data/wfp_market.sql',
+    'tests/sample_data/currency.sql',
+    'tests/sample_data/food_price.sql',
+]
+
+VIEW_LIST = [
+    view_params_admin1,
+    view_params_admin2,
+    view_params_location,
+    view_params_dataset,
+    view_params_food_security,
+    view_params_funding,
+    view_params_humanitarian_needs,
+    view_params_national_risk,
+    view_params_operational_presence,
+    view_params_org_type,
+    view_params_org,
+    view_params_population,
+    view_params_refugees,
+    view_params_resource,
+    view_params_sector,
+    view_params_conflict_event,
+    view_params_poverty_rate,
+    view_params_wfp_commodity,
+    view_params_wfp_market,
+    view_params_currency,
+    view_params_food_price,
+]


 def pytest_sessionstart(session):
-    os.environ['HAPI_DB_NAME'] = 'hapi_test'
+    os.environ['HAPI_DB_NAME'] = 'hapi_test'
+    os.environ['HAPI_IDENTIFIER_FILTERING'] = 'False'
+    os.environ['HDX_MIXPANEL_TOKEN'] = 'fake_token'
+
+    engine = create_engine(
+        get_config().SQL_ALCHEMY_PSYCOPG2_DB_URI,
+    )
+    _drop_tables_and_views(engine)
+    _create_tables_and_views(engine)
+
+
+def _create_tables_and_views(engine: Engine):
+    Base.metadata.create_all(engine)
+    with engine.connect() as conn:
+        for v in VIEW_LIST:
+            conn.execute(CreateView(v.name, v.selectable))
+        conn.commit()
+
+
+def _drop_tables_and_views(engine: Engine):
+    # drop views
+    inspector = inspect(engine)
+    views = inspector.get_view_names()
+    with engine.connect() as conn:
+        for view in views:
+            conn.execute(text(f'DROP VIEW IF EXISTS {view}'))
+        conn.commit()
+
+    # drop tables
+    metadata = MetaData()
+    metadata.reflect(bind=engine)
+    metadata.drop_all(bind=engine)


 @pytest.fixture(scope='session')
@@ -31,10 +130,9 @@ def log():

 @pytest.fixture(scope='session')
 def session_maker() -> sessionmaker[Session]:
-    # we don't want to import get_config before env vars are set for tests in pytest_sessionstart method
     from hdx_hapi.config.config import get_config
-
+
     engine = create_engine(
         get_config().SQL_ALCHEMY_PSYCOPG2_DB_URI,
     )
@@ -44,12 +142,10 @@ def session_maker() -> sessionmaker[Session]:

 @pytest.fixture(scope='session')
 def list_of_db_tables(log: Logger, session_maker: sessionmaker[Session]) -> List[str]:
-    # log.info('Getting list of db tables')
+    log.info('Getting list of db tables')
     session = session_maker()
     try:
-        result = session.execute(
-            text('SELECT tablename FROM pg_tables WHERE schemaname = \'public\'')
-        )
+        result = session.execute(text("SELECT tablename FROM pg_tables WHERE schemaname = 'public'"))
         return [row[0] for row in result if row != 'alembic_version']
     except Exception as e:
         raise e
@@ -72,16 +168,19 @@ def clear_db_tables(log: Logger, session_maker: sessionmaker[Session], list_of_d
     finally:
         db_session.close()

+
 @pytest.fixture(scope='function')
 def populate_test_data(log: Logger, session_maker: sessionmaker[Session]):
     log.info('Populating with test data')
     db_session = session_maker()
     try:
-        with open(SAMPLE_DATA_SQL_FILE, 'r') as file:
-            sql_commands = file.read()
-            db_session.execute(text(sql_commands))
-            db_session.commit()
-            log.info('Test data inserted successfully')
+        for sample_file in SAMPLE_DATA_SQL_FILES:
+            log.info(f'Starting data insert from {sample_file}')
+            with 
open(sample_file, 'r') as file: + sql_commands = file.read() + db_session.execute(text(sql_commands)) + db_session.commit() + log.info(f'Test data inserted successfully from {sample_file}') except Exception as e: log.error(f'Error while inserting test data: {str(e).splitlines()[0]}') db_session.rollback() @@ -89,7 +188,17 @@ def populate_test_data(log: Logger, session_maker: sessionmaker[Session]): finally: db_session.close() + @pytest.fixture(scope='function') def refresh_db(clear_db_tables, populate_test_data): pass + +@pytest.fixture(scope='function') +def enable_hapi_identifier_filtering(): + import hdx_hapi.config.config as config + + initial_config_id_filtering = config.CONFIG.HAPI_IDENTIFIER_FILTERING + config.CONFIG.HAPI_IDENTIFIER_FILTERING = True + yield + config.CONFIG.HAPI_IDENTIFIER_FILTERING = initial_config_id_filtering diff --git a/alembic/versions/afd54d1a867e_insert_sample_data.sql b/tests/sample_data.sql similarity index 92% rename from alembic/versions/afd54d1a867e_insert_sample_data.sql rename to tests/sample_data.sql index a67bf4af..ebb7ece6 100644 --- a/alembic/versions/afd54d1a867e_insert_sample_data.sql +++ b/tests/sample_data.sql @@ -12,17 +12,17 @@ VALUES (1, 'c3f001fa-b45b-464c-9460-1ca79fd39b40', 'dataset01', 'Dataset #1', 'provider01', 'Provider #1'), (2, '7cf3cec8-dbbc-4c96-9762-1464cd0bff75', 'dataset02', 'Dataset #2', 'provider02', 'Provider #2'); -INSERT INTO resource (id, dataset_ref, hdx_id, name, format, update_date, download_url, is_hxl) +INSERT INTO resource (id, dataset_ref, hdx_id, name, format, update_date, download_url, is_hxl, hapi_updated_date) VALUES (1, 1, '90deb235-1bf5-4bae-b231-3393222c2d01', 'resource-01.csv', 'csv', '2023-06-01 00:00:00', 'https://data.humdata.org/dataset/c3f001fa-b45b-464c-9460-1ca79fd39b40/resource/90deb235-1bf5-4bae-b231-3393222c2d01/download/resource-01.csv', - TRUE), + TRUE, '2023-01-01 00:00:00'), (2, 1, 'b9e438e0-b68a-49f9-b9a9-68c0f3e93604', 'resource-02.xlsx', 'xlsx', '2023-07-01 00:00:00', 'https://fdw.fews.net/api/tradeflowquantityvaluefacts/?dataset=1845&country=TZ&fields=simple&format=xlsx', - TRUE), + TRUE, '2023-01-01 00:00:00'), (3, 2, '62ad6e55-5f5d-4494-854c-4110687e9e25', 'resource-03.csv', 'csv', '2023-08-01 00:00:00', 'https://data.humdata.org/dataset/7cf3cec8-dbbc-4c96-9762-1464cd0bff75/resource/62ad6e55-5f5d-4494-854c-4110687e9e25/download/resource-03.csv', - TRUE); + TRUE, '2023-01-01 00:00:00'); -- these are the actual datatypes from -- https://data.humdata.org/dataset/organization-types-beta @@ -43,55 +43,55 @@ INSERT INTO org_type (code, description) VALUES ('447', 'United Nations'); -- dummy data -INSERT INTO org (id, acronym, name, org_type_code, reference_period_start, reference_period_end) VALUES -(1, 'ORG01', 'Organisation 1', '433', '2023-08-01 00:00:00', NULL), -(2, 'ORG02', 'Organisation 2', '437', '2023-07-01 00:00:00', NULL), -(3, 'ORG03', 'Organisation 3', '447', '2023-06-01 00:00:00', NULL); +INSERT INTO org (id, acronym, name, org_type_code, reference_period_start, reference_period_end, hapi_updated_date) VALUES +(1, 'ORG01', 'Organisation 1', '433', '2023-08-01 00:00:00', NULL, '2023-01-01 00:00:00'), +(2, 'ORG02', 'Organisation 2', '437', '2023-07-01 00:00:00', NULL, '2023-01-01 00:00:00'), +(3, 'ORG03', 'Organisation 3', '447', '2023-06-01 00:00:00', NULL, '2023-01-01 00:00:00'); -- these are the actual sector codes from -- https://data.humdata.org/dataset/global-coordination-groups-beta -- (they won't be sufficient for production; we'll have to add to them) -INSERT INTO sector 
(code, name, reference_period_start, reference_period_end) +INSERT INTO sector (code, name) VALUES -('SHL', 'Emergency Shelter and NFI', '2023-01-01 00:00:00', NULL), -('CCM', 'Camp Coordination / Management', '2023-01-01 00:00:00', NULL), -('PRO-MIN', 'Mine Action', '2023-01-01 00:00:00', NULL), -('FSC', 'Food Security', '2023-01-01 00:00:00', NULL), -('WSH', 'Water Sanitation Hygiene', '2023-01-01 00:00:00', NULL), -('LOG', 'Logistics', '2023-01-01 00:00:00', NULL), -('PRO-CPN', 'Child Protection', '2023-01-01 00:00:00', NULL), -('PRO', 'Protection', '2023-01-01 00:00:00', NULL), -('EDU', 'Education', '2023-01-01 00:00:00', NULL), -('NUT', 'Nutrition', '2023-01-01 00:00:00', NULL), -('HEA', 'Health', '2023-01-01 00:00:00', NULL), -('ERY', 'Early Recovery', '2023-01-01 00:00:00', NULL), -('TEL', 'Emergency Telecommunications', '2023-01-01 00:00:00', NULL), -('PRO-GBV', 'Gender Based Violence', '2023-01-01 00:00:00', NULL), -('PRO-HLP', 'Housing, Land and Property', '2023-01-01 00:00:00', NULL); +('SHL', 'Emergency Shelter and NFI'), +('CCM', 'Camp Coordination / Management'), +('PRO-MIN', 'Mine Action'), +('FSC', 'Food Security'), +('WSH', 'Water Sanitation Hygiene'), +('LOG', 'Logistics'), +('PRO-CPN', 'Child Protection'), +('PRO', 'Protection'), +('EDU', 'Education'), +('NUT', 'Nutrition'), +('HEA', 'Health'), +('ERY', 'Early Recovery'), +('TEL', 'Emergency Telecommunications'), +('PRO-GBV', 'Gender Based Violence'), +('PRO-HLP', 'Housing, Land and Property'); -- dummy data -INSERT INTO location (id, code, name, reference_period_start, reference_period_end) +INSERT INTO location (id, code, name, reference_period_start, reference_period_end, hapi_updated_date) VALUES -(1, 'FOO', 'Foolandia', '2023-01-01 00:00:00', NULL); +(1, 'FOO', 'Foolandia', '2023-01-01 00:00:00', NULL, '2023-01-01 00:00:00'); -- dummy data -INSERT INTO admin1 (id, location_ref, code, name, is_unspecified, reference_period_start, reference_period_end) +INSERT INTO admin1 (id, location_ref, code, name, is_unspecified, reference_period_start, reference_period_end, hapi_updated_date) VALUES -(1, 1, 'FOO-XXX', 'Unspecified', TRUE, '2023-01-01 00:00:00', NULL), -(2, 1, 'FOO-001', 'Province 01', FALSE, '2023-01-01 00:00:00', NULL), -(3, 1, 'FOO-002', 'Province 02', FALSE, '2023-01-01 00:00:00', NULL); +(1, 1, 'FOO-XXX', 'Unspecified', TRUE, '2023-01-01 00:00:00', NULL, '2023-01-01 00:00:00'), +(2, 1, 'FOO-001', 'Province 01', FALSE, '2023-01-01 00:00:00', NULL, '2023-01-01 00:00:00'), +(3, 1, 'FOO-002', 'Province 02', FALSE, '2023-01-01 00:00:00', NULL, '2023-01-01 00:00:00'); -- dummy data -- note that we need an "Unspecified" for every Admin1, including the unspecified one -INSERT INTO admin2 (id, admin1_ref, code, name, is_unspecified, reference_period_start, reference_period_end) +INSERT INTO admin2 (id, admin1_ref, code, name, is_unspecified, reference_period_start, reference_period_end, hapi_updated_date) VALUES -(1, 1, 'FOO-XXX-XXX', 'Unspecified', TRUE, '2023-01-01 00:00:00', NULL), -(2, 2, 'FOO-001-XXX', 'Unspecified', TRUE, '2023-01-01 00:00:00', NULL), -(3, 3, 'FOO-002-XXX', 'Unspecified', TRUE, '2023-01-01 00:00:00', NULL), -(4, 2, 'FOO-001-A', 'District A', FALSE, '2023-01-01 00:00:00', NULL), -(5, 2, 'FOO-001-B', 'District B', FALSE, '2023-01-01 00:00:00', NULL), -(6, 3, 'FOO-002-C', 'District C', FALSE, '2023-01-01 00:00:00', NULL), -(7, 3, 'FOO-002-D', 'District D', FALSE, '2023-01-01 00:00:00', NULL); +(1, 1, 'FOO-XXX-XXX', 'Unspecified', TRUE, '2023-01-01 00:00:00', NULL, '2023-01-01 00:00:00'), +(2, 2, 
'FOO-001-XXX', 'Unspecified', TRUE, '2023-01-01 00:00:00', NULL, '2023-01-01 00:00:00'),
+(3, 3, 'FOO-002-XXX', 'Unspecified', TRUE, '2023-01-01 00:00:00', NULL, '2023-01-01 00:00:00'),
+(4, 2, 'FOO-001-A', 'District A', FALSE, '2023-01-01 00:00:00', NULL, '2023-01-01 00:00:00'),
+(5, 2, 'FOO-001-B', 'District B', FALSE, '2023-01-01 00:00:00', NULL, '2023-01-01 00:00:00'),
+(6, 3, 'FOO-002-C', 'District C', FALSE, '2023-01-01 00:00:00', NULL, '2023-01-01 00:00:00'),
+(7, 3, 'FOO-002-D', 'District D', FALSE, '2023-01-01 00:00:00', NULL, '2023-01-01 00:00:00');

 -- may be OK for production
diff --git a/tests/sample_data/conflict_event.sql b/tests/sample_data/conflict_event.sql
new file mode 100644
index 00000000..55d6f0a1
--- /dev/null
+++ b/tests/sample_data/conflict_event.sql
@@ -0,0 +1,3 @@
+INSERT INTO conflict_event (resource_hdx_id, admin2_ref, event_type, events, fatalities, reference_period_start, reference_period_end) VALUES
+('a8e69c6c-16fc-4983-92ee-e04e8960b51f', 4, 'political_violence', 10, 2, '2024-01-01', '2024-01-31'),
+('a8e69c6c-16fc-4983-92ee-e04e8960b51f', 1, 'civilian_targeting', 3, 0, '2024-01-01', '2024-01-31');
\ No newline at end of file
diff --git a/tests/sample_data/currency.sql b/tests/sample_data/currency.sql
new file mode 100644
index 00000000..5dd99419
--- /dev/null
+++ b/tests/sample_data/currency.sql
@@ -0,0 +1,6 @@
+-- dummy data
+INSERT INTO currency (code, name)
+VALUES
+('USD', 'United States dollar'),
+('RON', 'Romanian leu'),
+('EUR', 'Euro');
diff --git a/tests/sample_data/dataset_resource.sql b/tests/sample_data/dataset_resource.sql
new file mode 100644
index 00000000..92a47b1a
--- /dev/null
+++ b/tests/sample_data/dataset_resource.sql
@@ -0,0 +1,20 @@
+-- dummy data
+INSERT INTO dataset (hdx_id, hdx_stub, title, hdx_provider_stub, hdx_provider_name)
+VALUES
+('90deb235-1bf5-4bae-b231-3393222c2d01', 'dataset01', 'Dataset #1', 'provider01', 'Provider #1'),
+('b9e438e0-b68a-49f9-b9a9-68c0f3e93604', 'dataset02', 'Dataset #2', 'provider02', 'Provider #2'),
+('62ad6e55-5f5d-4494-854c-4110687e9e25', 'dataset03', 'Dataset #3', 'provider03', 'Provider #3');
+
+-- dummy data
+INSERT INTO resource (hdx_id, dataset_hdx_id, name, format, update_date, is_hxl, download_url, hapi_updated_date)
+VALUES
+('17acb541-9431-409a-80a8-50eda7e8ebab', '90deb235-1bf5-4bae-b231-3393222c2d01', 'resource-01.csv', 'csv', '2023-06-01 00:00:00',TRUE,
+'https://data.humdata.org/dataset/c3f001fa-b45b-464c-9460-1ca79fd39b40/resource/90deb235-1bf5-4bae-b231-3393222c2d01/download/resource-01.csv',
+ '2023-01-01 00:00:00'),
+('d1160fa9-1d58-4f96-9df5-edbff2e80895', 'b9e438e0-b68a-49f9-b9a9-68c0f3e93604','resource-02.xlsx', 'xlsx', '2023-07-01 00:00:00',TRUE,
+'https://fdw.fews.net/api/tradeflowquantityvaluefacts/?dataset=1845&country=TZ&fields=simple&format=xlsx',
+ '2023-01-01 00:00:00'),
+('a8e69c6c-16fc-4983-92ee-e04e8960b51f', '62ad6e55-5f5d-4494-854c-4110687e9e25', 'resource-03.csv', 'csv', '2023-08-01 00:00:00',TRUE,
+ 'https://data.humdata.org/dataset/7cf3cec8-dbbc-4c96-9762-1464cd0bff75/resource/62ad6e55-5f5d-4494-854c-4110687e9e25/download/resource-03.csv',
+ '2023-01-01 00:00:00');
+
diff --git a/tests/sample_data/food_price.sql b/tests/sample_data/food_price.sql
new file mode 100644
index 00000000..80a756b6
--- /dev/null
+++ b/tests/sample_data/food_price.sql
@@ -0,0 +1,4 @@
+INSERT INTO food_price (resource_hdx_id, market_code, commodity_code, currency_code, unit, price_flag, price_type, price, reference_period_start, reference_period_end)
+VALUES 
+('17acb541-9431-409a-80a8-50eda7e8ebab', '001', '001', 'EUR', 'basket', 'actual', 'Retail', 100.1, '2024-01-02 00:00:00', '2024-12-31 23:59:59.999999'), +('17acb541-9431-409a-80a8-50eda7e8ebab', '002', '001', 'RON', 'kilo', 'aggregate', 'Wholesale', 200.2, '2024-01-02 00:00:00', '2024-12-31 23:59:59.999999'); diff --git a/tests/sample_data/food_security.sql b/tests/sample_data/food_security.sql new file mode 100644 index 00000000..e6538d87 --- /dev/null +++ b/tests/sample_data/food_security.sql @@ -0,0 +1,12 @@ +INSERT INTO public.food_security (resource_hdx_id, admin2_ref, ipc_phase, ipc_type, population_in_phase, population_fraction_in_phase, reference_period_start, reference_period_end) VALUES +('17acb541-9431-409a-80a8-50eda7e8ebab', 1, '1', 'current', 49348, 0.8399945530060597, '2021-01-01 00:00:00', '2021-05-31 00:00:00'), +('17acb541-9431-409a-80a8-50eda7e8ebab', 1, '2', 'current', 8225, 0.1400047661196977, '2021-01-01 00:00:00', '2021-05-31 00:00:00'), +('17acb541-9431-409a-80a8-50eda7e8ebab', 1, '3', 'current', 1175, 0.02000068087424253, '2021-01-01 00:00:00', '2021-05-31 00:00:00'), +('17acb541-9431-409a-80a8-50eda7e8ebab', 1, '4', 'current', 0, 0, '2021-01-01 00:00:00', '2021-05-31 00:00:00'), +('17acb541-9431-409a-80a8-50eda7e8ebab', 1, '5', 'current', 0, 0, '2021-01-01 00:00:00', '2021-05-31 00:00:00'), +('17acb541-9431-409a-80a8-50eda7e8ebab', 1, '3+', 'current', 1175, 0.02000068087424253, '2021-01-01 00:00:00', '2021-05-31 00:00:00'), +('17acb541-9431-409a-80a8-50eda7e8ebab', 1, 'all', 'current', 58748, 1, '2021-01-01 00:00:00', '2021-05-31 00:00:00'), +('17acb541-9431-409a-80a8-50eda7e8ebab', 2, '1', 'first projection', 47586, 0.8100020426227276, '2021-06-01 00:00:00', '2021-08-31 00:00:00'), +('17acb541-9431-409a-80a8-50eda7e8ebab', 2, '2', 'first projection', 9400, 0.16000544699394023, '2021-06-01 00:00:00', '2021-08-31 00:00:00'), +('17acb541-9431-409a-80a8-50eda7e8ebab', 4, '3', 'first projection', 1762, 0.0299925103833322, '2021-06-01 00:00:00', '2021-08-31 00:00:00'), +('17acb541-9431-409a-80a8-50eda7e8ebab', 4, '4', 'first projection', 0, 0, '2021-06-01 00:00:00', '2021-08-31 00:00:00'); \ No newline at end of file diff --git a/tests/sample_data/funding.sql b/tests/sample_data/funding.sql new file mode 100644 index 00000000..65da4cb3 --- /dev/null +++ b/tests/sample_data/funding.sql @@ -0,0 +1,4 @@ +-- Funding +INSERT INTO funding (resource_hdx_id, appeal_code, appeal_name, appeal_type, location_ref, requirements_usd, funding_usd, funding_pct, reference_period_start, reference_period_end) +VALUES +('17acb541-9431-409a-80a8-50eda7e8ebab', 'HFOO24', 'Foolandia HRP 2024', 'HRP', 1, 100000.3, 50000.7, 50, '2023-01-01 00:00:00', NULL); \ No newline at end of file diff --git a/tests/sample_data/humanitarian_needs.sql b/tests/sample_data/humanitarian_needs.sql new file mode 100644 index 00000000..9c2e3cca --- /dev/null +++ b/tests/sample_data/humanitarian_needs.sql @@ -0,0 +1,16 @@ +-- dummy data +INSERT INTO public.humanitarian_needs +(resource_hdx_id, admin2_ref, gender, age_range, min_age, max_age, sector_code, population_group, population_status, disabled_marker, population, reference_period_start, reference_period_end) +VALUES ('17acb541-9431-409a-80a8-50eda7e8ebab',2, 'all', 'ALL',0,99,'EDU','REF','AFF','y',500000,'2023-01-01 00:00:00', '2025-01-01 00:00:00'); +INSERT INTO public.humanitarian_needs +(resource_hdx_id, admin2_ref, gender, age_range, min_age, max_age, sector_code, population_group, population_status, disabled_marker, population, reference_period_start, 
reference_period_end) +VALUES ('17acb541-9431-409a-80a8-50eda7e8ebab', 4, 'f', '0-17',0,17,'SHL','IDP','INN','n',1500000,'2023-01-01 00:00:00', '2025-01-01 00:00:00'); +INSERT INTO public.humanitarian_needs +(resource_hdx_id, admin2_ref, gender, age_range, min_age, max_age, sector_code, population_group, population_status, disabled_marker, population, reference_period_start, reference_period_end) +VALUES ('17acb541-9431-409a-80a8-50eda7e8ebab', 6, 'm', '65+',65,100,'WSH','all','all','all',100000,'2023-01-01 00:00:00', '2025-01-01 00:00:00'); +INSERT INTO public.humanitarian_needs +(resource_hdx_id, admin2_ref, gender, age_range, min_age, max_age, sector_code, population_group, population_status, disabled_marker, population, reference_period_start, reference_period_end) +VALUES ('17acb541-9431-409a-80a8-50eda7e8ebab', 4, 'f', '18-64',18,64,'SHL','IDP','INN','n',1500000,'2023-01-01 00:00:00', '2025-01-01 00:00:00'); +INSERT INTO public.humanitarian_needs +(resource_hdx_id, admin2_ref, gender, age_range, min_age, max_age, sector_code, population_group, population_status, disabled_marker, population, reference_period_start, reference_period_end) +VALUES ('17acb541-9431-409a-80a8-50eda7e8ebab', 6, 'm', '18-64',18,64,'WSH','all','all','all',100000,'2023-01-01 00:00:00', '2025-01-01 00:00:00'); diff --git a/tests/sample_data/location_admin.sql b/tests/sample_data/location_admin.sql new file mode 100644 index 00000000..ff89988b --- /dev/null +++ b/tests/sample_data/location_admin.sql @@ -0,0 +1,23 @@ +-- dummy data +INSERT INTO location (id, code, name, reference_period_start, reference_period_end) +VALUES +(1, 'FOO', 'Foolandia', '2023-01-01 00:00:00', NULL); + +-- dummy data +INSERT INTO admin1 (id, location_ref, code, name, is_unspecified, reference_period_start, reference_period_end) +VALUES +(1, 1, 'FOO-XXX', 'Unspecified', TRUE, '2023-01-01 00:00:00', NULL), +(2, 1, 'FOO-001', 'Province 01', FALSE, '2023-01-01 00:00:00', NULL), +(3, 1, 'FOO-002', 'Province 02', FALSE, '2023-01-01 00:00:00', NULL); + +-- dummy data +-- note that we need an "Unspecified" for every Admin1, including the unspecified one +INSERT INTO admin2 (id, admin1_ref, code, name, is_unspecified, reference_period_start, reference_period_end) +VALUES +(1, 1, 'FOO-XXX-XXX', 'Unspecified', TRUE, '2023-01-01 00:00:00', NULL), +(2, 2, 'FOO-001-XXX', 'Unspecified', TRUE, '2023-01-01 00:00:00', NULL), +(3, 3, 'FOO-002-XXX', 'Unspecified', TRUE, '2023-01-01 00:00:00', NULL), +(4, 2, 'FOO-001-A', 'District A', FALSE, '2023-01-01 00:00:00', NULL), +(5, 2, 'FOO-001-B', 'District B', FALSE, '2023-01-01 00:00:00', NULL), +(6, 3, 'FOO-002-C', 'District C', FALSE, '2023-01-01 00:00:00', NULL), +(7, 3, 'FOO-002-D', 'District D', FALSE, '2023-01-01 00:00:00', NULL); \ No newline at end of file diff --git a/tests/sample_data/national_risk.sql b/tests/sample_data/national_risk.sql new file mode 100644 index 00000000..6785810e --- /dev/null +++ b/tests/sample_data/national_risk.sql @@ -0,0 +1,5 @@ + +INSERT INTO public.national_risk (resource_hdx_id, location_ref, risk_class, global_rank, overall_risk, hazard_exposure_risk, vulnerability_risk, coping_capacity_risk, meta_missing_indicators_pct, meta_avg_recentness_years, reference_period_start, reference_period_end) VALUES +('17acb541-9431-409a-80a8-50eda7e8ebab', 1, '5', 4, 8.7, 8.5, 7.1, 6.6, 11, 0.2571428571428571, '2024-01-02 00:00:00', '2024-12-31 23:59:59.999999'), +('17acb541-9431-409a-80a8-50eda7e8ebab', 1, '4', 5, 6.9, 7.9, 6.4, 6.6, 22.2, 0.3918918918918919, '2024-01-03 00:00:00', 
'2024-12-31 23:59:59.999999'), +('17acb541-9431-409a-80a8-50eda7e8ebab', 1, '3', 6, 7.2, 6.8, 5.9, 6.6, 33.3, 0.3918918918918919, '2024-01-04 00:00:00', '2024-12-31 23:59:59.999999'); diff --git a/tests/sample_data/operational_presence.sql b/tests/sample_data/operational_presence.sql new file mode 100644 index 00000000..b645123f --- /dev/null +++ b/tests/sample_data/operational_presence.sql @@ -0,0 +1,8 @@ +-- 3W +INSERT INTO operational_presence (resource_hdx_id, org_acronym, org_name, sector_code, admin2_ref, reference_period_start, reference_period_end) +VALUES +('17acb541-9431-409a-80a8-50eda7e8ebab', 'ORG01', 'Organisation 1', 'SHL', 2, '2023-01-01 00:00:00', NULL), +('17acb541-9431-409a-80a8-50eda7e8ebab', 'ORG02', 'Organisation 2', 'FSC', 4, '2023-01-01 00:00:00', NULL), +('17acb541-9431-409a-80a8-50eda7e8ebab', 'ORG03', 'Organisation 3', 'WSH', 4, '2023-01-01 00:00:00', NULL), +('17acb541-9431-409a-80a8-50eda7e8ebab', 'ORG03', 'Organisation 3', 'HEA', 6, '2023-01-01 00:00:00', NULL), +('17acb541-9431-409a-80a8-50eda7e8ebab', 'ORG02', 'Organisation 2', 'WSH', 1, '2023-01-01 00:00:00', NULL); diff --git a/tests/sample_data/org.sql b/tests/sample_data/org.sql new file mode 100644 index 00000000..5da1a474 --- /dev/null +++ b/tests/sample_data/org.sql @@ -0,0 +1,5 @@ +-- dummy data +INSERT INTO org (acronym, name, org_type_code) VALUES +('ORG01', 'Organisation 1', '433'), +('ORG02', 'Organisation 2', '437'), +('ORG03', 'Organisation 3', '447'); \ No newline at end of file diff --git a/tests/sample_data/org_type.sql b/tests/sample_data/org_type.sql new file mode 100644 index 00000000..20f4d409 --- /dev/null +++ b/tests/sample_data/org_type.sql @@ -0,0 +1,16 @@ +-- dummy data +INSERT INTO org_type (code, description) VALUES +('431', 'Academic / Research'), +('433', 'Donor'), +('434', 'Embassy'), +('435', 'Government'), +('437', 'International NGO'), +('438', 'International Organization'), +('439', 'Media'), +('440', 'Military'), +('441', 'National NGO'), +('443', 'Other'), +('444', 'Private sector'), +('445', 'Red Cross / Red Crescent'), +('446', 'Religious'), +('447', 'United Nations'); diff --git a/tests/sample_data/population.sql b/tests/sample_data/population.sql new file mode 100644 index 00000000..032831ff --- /dev/null +++ b/tests/sample_data/population.sql @@ -0,0 +1,22 @@ +-- dummy data + +INSERT INTO population (resource_hdx_id,admin2_ref,gender,age_range,min_age,max_age,population,reference_period_start,reference_period_end) VALUES + ('17acb541-9431-409a-80a8-50eda7e8ebab',1,'x','10-14',10,14,1000000,'2023-01-01 00:00:00','2023-06-30 00:00:00'), + ('17acb541-9431-409a-80a8-50eda7e8ebab',1,'f','25-29',25,25,500001,'2023-01-01 00:00:00','2023-06-30 00:00:00'), + ('17acb541-9431-409a-80a8-50eda7e8ebab',1,'m','10-14',10,14,489999,'2023-01-01 00:00:00','2023-06-30 00:00:00'), + ('17acb541-9431-409a-80a8-50eda7e8ebab',1,'x','25-29',25,29,9999,'2023-01-01 00:00:00','2023-06-30 00:00:00'), + ('17acb541-9431-409a-80a8-50eda7e8ebab',1,'f','0-4',0,4,300000,'2023-01-01 00:00:00','2023-06-30 00:00:00'), + ('17acb541-9431-409a-80a8-50eda7e8ebab',2,'x','5-9',5,9,2000,'2023-01-01 00:00:00','2023-06-30 00:00:00'), + ('17acb541-9431-409a-80a8-50eda7e8ebab',4,'m','10-14',10,14,100000,'2023-01-01 00:00:00','2023-06-30 00:00:00'); + +-- INSERT INTO population (id, resource_ref, admin2_ref, gender_code, age_range_code, population, reference_period_start, reference_period_end, source_data) +-- VALUES +-- (1, 1, 1, 'x', '10-14', 1000000, '2023-01-01 00:00:00', '2023-06-30 00:00:00', 'DATA,DATA,DATA'), 
-- total national +-- (2, 1, 1, 'f', '25-29', 500001, '2023-01-01 00:00:00', '2023-06-30 00:00:00','DATA,DATA,DATA'), -- national (f), all ages +-- (3, 1, 1, 'm', '10-14', 489999, '2023-01-01 00:00:00', '2023-06-30 00:00:00','DATA,DATA,DATA'), -- national (f), all ages +-- (4, 1, 1, 'x', '25-29', 9999, '2023-01-01 00:00:00', '2023-06-30 00:00:00','DATA,DATA,DATA'), -- national (x), all ages +-- (5, 1, 1, 'f', '0-4', 300000, '2023-01-01 00:00:00', '2023-06-30 00:00:00','DATA,DATA,DATA'), -- national (f) children +-- (6, 1, 2, 'x', '5-9', 2000, '2023-01-01 00:00:00', '2023-06-30 00:00:00','DATA,DATA,DATA'), -- admin1 (x) adolescents +-- (7, 1, 4, 'm', '10-14', 100000, '2023-01-01 00:00:00', '2023-06-30 00:00:00','DATA,DATA,DATA'); -- admin2 (m) elderly + +-- -- end diff --git a/tests/sample_data/poverty_rate.sql b/tests/sample_data/poverty_rate.sql new file mode 100644 index 00000000..c9351851 --- /dev/null +++ b/tests/sample_data/poverty_rate.sql @@ -0,0 +1,10 @@ +-- dummy data +INSERT INTO poverty_rate( + resource_hdx_id, admin1_ref, admin1_name, mpi, headcount_ratio, intensity_of_deprivation, vulnerable_to_poverty, in_severe_poverty, reference_period_start, reference_period_end) +VALUES +('17acb541-9431-409a-80a8-50eda7e8ebab',2,'Province 01',0.617442,85.4,72.3,10.5,52.1,'2022-01-01 00:00:00', '2023-01-01 00:00:00'), +('17acb541-9431-409a-80a8-50eda7e8ebab',2,'Province 01',0.617442,85.4,72.3,10.5,52.1,'2022-01-02 00:00:00', '2023-01-01 00:00:00'), +('17acb541-9431-409a-80a8-50eda7e8ebab',2,'Province 01',0.617442,85.4,72.3,10.5,52.1,'2022-01-03 00:00:00', '2023-01-01 00:00:00'), +('17acb541-9431-409a-80a8-50eda7e8ebab',3,'Province 02',0.617442,85.4,72.3,10.5,52.1,'2022-01-04 00:00:00', '2023-01-01 00:00:00'), +('17acb541-9431-409a-80a8-50eda7e8ebab',3,'Province 02',0.617442,85.4,72.3,10.5,52.1,'2022-01-05 00:00:00', '2023-01-01 00:00:00'), +('17acb541-9431-409a-80a8-50eda7e8ebab',3,'Province 02',0.617442,85.4,72.3,10.5,52.1,'2022-01-06 00:00:00', '2023-01-01 00:00:00'); diff --git a/tests/sample_data/refugees.sql b/tests/sample_data/refugees.sql new file mode 100644 index 00000000..396e1348 --- /dev/null +++ b/tests/sample_data/refugees.sql @@ -0,0 +1,10 @@ +-- dummy data +INSERT INTO refugees +(resource_hdx_id, origin_location_ref, asylum_location_ref, population_group, gender, age_range, min_age, max_age, population, reference_period_start, reference_period_end) +VALUES ('17acb541-9431-409a-80a8-50eda7e8ebab',1, 1, 'REF','f','12-17',12,17,100000,'2023-01-01 00:00:00', '2025-01-01 00:00:00'), +('17acb541-9431-409a-80a8-50eda7e8ebab',1, 1, 'REF','m','12-17',12,17,100000,'2023-01-01 00:00:00', '2025-01-01 00:00:00'), +('17acb541-9431-409a-80a8-50eda7e8ebab',1, 1, 'REF','f','18-59',18,59,100000,'2023-01-01 00:00:00', '2025-01-01 00:00:00'), +('17acb541-9431-409a-80a8-50eda7e8ebab',1, 1, 'REF','m','18-59',18,59,100000,'2023-01-01 00:00:00', '2025-01-01 00:00:00'), +('17acb541-9431-409a-80a8-50eda7e8ebab',1, 1, 'REF','f','60+',60,100,100000,'2023-01-01 00:00:00', '2025-01-01 00:00:00'), +('17acb541-9431-409a-80a8-50eda7e8ebab',1, 1, 'REF','m','60+',60,100,100000,'2023-01-01 00:00:00', '2025-01-01 00:00:00'), +('17acb541-9431-409a-80a8-50eda7e8ebab',1, 1, 'REF','all','ALL',0,99,100000,'2023-01-01 00:00:00', '2025-01-01 00:00:00'); diff --git a/tests/sample_data/sector.sql b/tests/sample_data/sector.sql new file mode 100644 index 00000000..2e2602e5 --- /dev/null +++ b/tests/sample_data/sector.sql @@ -0,0 +1,18 @@ +-- dummy data +INSERT INTO sector (code, name) +VALUES +('SHL', 'Emergency Shelter 
and NFI'), +('CCM', 'Camp Coordination / Management'), +('PRO-MIN', 'Mine Action'), +('FSC', 'Food Security'), +('WSH', 'Water Sanitation Hygiene'), +('LOG', 'Logistics'), +('PRO-CPN', 'Child Protection'), +('PRO', 'Protection'), +('EDU', 'Education'), +('NUT', 'Nutrition'), +('HEA', 'Health'), +('ERY', 'Early Recovery'), +('TEL', 'Emergency Telecommunications'), +('PRO-GBV', 'Gender Based Violence'), +('PRO-HLP', 'Housing, Land and Property'); \ No newline at end of file diff --git a/tests/sample_data/wfp_commodity.sql b/tests/sample_data/wfp_commodity.sql new file mode 100644 index 00000000..fec3e85c --- /dev/null +++ b/tests/sample_data/wfp_commodity.sql @@ -0,0 +1,4 @@ +INSERT INTO wfp_commodity (code, category, name) +VALUES +('001', 'vegetables and fruits', 'Commodity #1'), +('002', 'cereals and tubers', 'Commodity #2'); diff --git a/tests/sample_data/wfp_market.sql b/tests/sample_data/wfp_market.sql new file mode 100644 index 00000000..4292577c --- /dev/null +++ b/tests/sample_data/wfp_market.sql @@ -0,0 +1,5 @@ +-- 3W +INSERT INTO wfp_market (code, admin2_ref, name, lat, lon) +VALUES +('001', 4, 'Market #1', 0.1, 0.1), +('002', 1, 'Market #2', 0.2, 0.2); diff --git a/tests/test_analytics/__init__.py b/tests/test_analytics/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/test_analytics/test_api_call_tracking.py b/tests/test_analytics/test_api_call_tracking.py new file mode 100644 index 00000000..dddb7e21 --- /dev/null +++ b/tests/test_analytics/test_api_call_tracking.py @@ -0,0 +1,64 @@ +import logging +import time + +import pytest +from httpx import AsyncClient +from main import app +from unittest.mock import patch + +TEST_BASE_URL = 'http://test' +TEST_USER_AGENT = ( + 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36' +) + +log = logging.getLogger(__name__) + +ENDPOINT = '/api/v1/coordination-context/operational-presence' + + +@pytest.mark.asyncio +async def test_tracking_endpoint_success(): + with patch('hdx_hapi.endpoints.middleware.util.util.send_mixpanel_event') as send_mixpanel_event_patch, patch( + 'hdx_hapi.endpoints.middleware.util.util.HashCodeGenerator.compute_hash', + return_value='123456', + ): + async with AsyncClient(app=app, base_url=TEST_BASE_URL) as ac: + headers = { + 'User-Agent': TEST_USER_AGENT, + 'HTTP_X_REAL_IP': '127.0.0.1', + } + params = {'admin_level': '1', 'output_format': 'json'} + response = await ac.get(ENDPOINT, params=params, headers=headers) + + assert response.status_code == 200 + assert send_mixpanel_event_patch.call_count == 1, 'API calls should be tracked' + + expected_mixpanel_dict = { + 'endpoint name': ENDPOINT, + 'query params': ['admin_level', 'output_format'], + 'time': pytest.approx(time.time()), + 'app name': None, + 'output format': 'json', + 'admin level': '1', + 'server side': True, + 'response code': 200, + 'user agent': TEST_USER_AGENT, + 'ip': '127.0.0.1', + '$os': 'Windows', + '$browser': 'Chrome', + '$browser_version': '124', + '$current_url': f'{TEST_BASE_URL}{ENDPOINT}?admin_level=1&output_format=json', + } + + # Check parameters match the expected ones + send_mixpanel_event_patch.assert_called_once_with('api call', '123456', expected_mixpanel_dict) + + +@pytest.mark.asyncio +async def test_docs_page_tracked(): + with patch('hdx_hapi.endpoints.middleware.util.util.send_mixpanel_event') as send_mixpanel_event_patch: + async with AsyncClient(app=app, base_url=TEST_BASE_URL) as ac: + response = await ac.get('/docs') + + assert 
response.status_code == 200 + assert send_mixpanel_event_patch.call_count == 1, 'Docs page should be tracked as a page view' diff --git a/tests/test_endpoints/endpoint_data.py b/tests/test_endpoints/endpoint_data.py index b4827b76..0640bc0d 100644 --- a/tests/test_endpoints/endpoint_data.py +++ b/tests/test_endpoints/endpoint_data.py @@ -1,16 +1,41 @@ from datetime import date +from hapi_schema.utils.enums import ( + CommodityCategory, + DisabledMarker, + EventType, + Gender, + PopulationGroup, + PopulationStatus, + PriceFlag, + PriceType, + RiskClass, + IPCPhase, + IPCType, +) + +from hdx_hapi.endpoints.util.util import AdminLevel + endpoint_data = { - '/api/admin1': { + '/api/v1/metadata/admin1': { 'query_parameters': { 'code': 'FoO-001', 'name': 'Province 01', 'location_code': 'FoO', 'location_name': 'Foolandia', + # 'reference_period_start_min': '2020-01-01T00:00:00', + # 'reference_period_start_max': '2024-01-01T00:00:00', }, - 'expected_fields': ['code', 'name', 'location_code', 'location_name'], + 'expected_fields': [ + 'code', + 'name', + 'location_code', + 'location_name', + 'reference_period_start', + 'reference_period_end', + ], }, - '/api/admin2': { + '/api/v1/metadata/admin2': { 'query_parameters': { 'code': 'FoO-001-A', 'name': 'District A', @@ -18,73 +43,154 @@ 'admin1_name': 'Province 01', 'location_code': 'FOo', 'location_name': 'Foolandia', + # 'reference_period_start_min': '2020-01-01T00:00:00', + # 'reference_period_start_max': '2024-01-01T00:00:00', }, - 'expected_fields': ['code', 'name', 'admin1_code', 'admin1_name', 'location_code', 'location_name'], - }, - '/api/age_range': { - 'query_parameters': {'code': '10-14'}, - 'expected_fields': ['code', 'age_min', 'age_max'], + 'expected_fields': [ + 'code', + 'name', + 'admin1_code', + 'admin1_name', + 'location_code', + 'location_name', + 'reference_period_start', + 'reference_period_end', + ], }, - '/api/dataset': { + '/api/v1/metadata/dataset': { 'query_parameters': { - 'hdx_id': 'c3f001fa-b45b-464c-9460-1ca79fd39b40', - 'title': 'Dataset #1', - 'hdx_provider_stub': 'Provider01', + 'dataset_hdx_id': '90deb235-1bf5-4bae-b231-3393222c2d01', + 'dataset_hdx_title': 'Dataset #1', + 'hdx_provider_stub': 'pRoViDeR01', 'hdx_provider_name': 'Provider #1', }, 'expected_fields': [ - 'hdx_id', - 'hdx_stub', - 'title', + 'dataset_hdx_id', + 'dataset_hdx_stub', + 'dataset_hdx_title', 'hdx_provider_stub', 'hdx_provider_name', 'hdx_link', # computed field 'hdx_api_link', # computed field + 'provider_hdx_link', # computed field + 'provider_hdx_api_link', # computed field ], }, - '/api/gender': { - 'query_parameters': {'code': 'F', 'name': 'female'}, - 'expected_fields': ['code', 'description'], + '/api/v1/metadata/location': { + 'query_parameters': { + 'code': 'foo', + 'name': 'Foolandia', + # 'reference_period_start_min': '2020-01-01T00:00:00', + # 'reference_period_start_max': '2024-01-01T00:00:00', + }, + 'expected_fields': [ + 'code', + 'name', + 'reference_period_start', + 'reference_period_end', + ], }, - '/api/location': { - 'query_parameters': {'code': 'foo', 'name': 'Foolandia'}, - 'expected_fields': ['code', 'name'], + '/api/v1/coordination-context/conflict-event': { + 'query_parameters': { + 'event_type': EventType.POLITICAL_VIOLENCE.value, + 'location_ref': 1, + 'location_code': 'foo', + 'location_name': 'Foolandia', + 'admin1_ref': 2, + 'admin1_code': 'foo-001', + 'admin1_name': 'province', + 'admin2_ref': 4, + 'admin2_code': 'foo-001-a', + 'admin2_name': 'district', + # 'reference_period_start_min': '2024-01-01T00:00:00', 
+ # 'reference_period_start_max': '2024-01-02T00:00:00', + # 'reference_period_end_min': '2024-01-30T00:00:00', + # 'reference_period_end_max': '2024-02-01T00:00:00', + }, + 'expected_fields': [ + 'event_type', + 'events', + 'fatalities', + 'resource_hdx_id', + 'location_ref', + 'location_code', + 'location_name', + 'admin1_ref', + 'admin1_code', + 'admin1_name', + 'admin2_ref', + 'admin2_code', + 'admin2_name', + 'reference_period_start', + 'reference_period_end', + ], }, - '/api/themes/3W': { + '/api/v1/coordination-context/funding': { + 'query_parameters': { + 'appeal_code': 'hfoo24', + 'appeal_type': 'hRp', + 'location_code': 'foo', + 'location_name': 'Foolandia', + # 'reference_period_start_min': '2020-01-01T00:00:00', + # 'reference_period_start_max': '2024-01-01T00:00:00', + }, + 'expected_fields': [ + 'resource_hdx_id', + 'appeal_code', + 'appeal_name', + 'appeal_type', + 'requirements_usd', + 'funding_usd', + 'funding_pct', + 'location_ref', + 'location_code', + 'location_name', + 'reference_period_start', + 'reference_period_end', + ], + }, + '/api/v1/coordination-context/operational-presence': { 'query_parameters': { - 'sector_code': 'Shl', - 'dataset_hdx_provider_stub': 'PROVIDER01', - 'resource_update_date_min': date(2023, 6, 1), - 'resource_update_date_max': date(2023, 6, 2), 'org_acronym': 'oRG01', 'org_name': 'Organisation 1', + 'sector_code': 'Shl', 'sector_name': 'Emergency Shelter and NFI', + 'location_ref': 1, 'location_code': 'foo', 'location_name': 'Foolandia', + 'admin1_ref': 2, 'admin1_code': 'foo-001', + 'admin1_name': 'province', 'admin1_is_unspecified': False, + 'admin2_ref': 2, 'admin2_code': 'foo-001-xxx', 'admin2_name': 'Unspecified', 'admin2_is_unspecified': True, + # 'reference_period_start_min': '2020-01-01T00:00:00', + # 'reference_period_start_max': '2024-01-01T00:00:00', }, 'expected_fields': [ 'sector_code', - 'dataset_hdx_stub', 'resource_hdx_id', 'org_acronym', 'org_name', + 'org_type_code', + 'org_type_description', 'sector_name', + 'location_ref', 'location_code', 'location_name', 'reference_period_start', 'reference_period_end', + 'admin1_ref', 'admin1_code', 'admin1_name', + 'admin2_ref', 'admin2_code', 'admin2_name', ], }, - '/api/org': { + '/api/v1/metadata/org': { 'query_parameters': { 'acronym': 'Org01', 'name': 'Organisation 1', @@ -93,102 +199,178 @@ }, 'expected_fields': ['acronym', 'name', 'org_type_code', 'org_type_description'], }, - '/api/org_type': { + '/api/v1/metadata/org-type': { 'query_parameters': { 'code': '431', - 'name': 'national', # International + 'name': 'Academic / Research', }, 'expected_fields': ['code', 'description'], }, - '/api/themes/population': { + '/api/v1/population-social/population': { 'query_parameters': { - 'gender_code': 'X', - 'age_range_code': '10-14', - 'population': 1000000, - 'dataset_hdx_provider_stub': 'PROvider01', - 'resource_update_date_min': date(2023, 6, 1), - 'resource_update_date_max': date(2023, 6, 2), + 'admin2_ref': 1, + 'gender': Gender.NONBINARY.value, + 'age_range': '10-14', + 'min_age': 10, + 'max_age': 14, + 'population_min': 0, + 'population_max': 10000000, + #'reference_period_start_min': '2020-01-01T00:00:00', + #'reference_period_end_max': '2024-01-01T00:00:00', 'location_code': 'fOO', 'location_name': 'Foolandia', 'admin1_code': 'FOO-xxx', - 'admin1_is_unspecified': True, + 'admin1_is_unspecified': False, 'admin2_code': 'FOO-xxx-XXX', 'admin2_name': 'Unspecified', 'admin2_is_unspecified': True, }, 'expected_fields': [ - 'gender_code', - 'age_range_code', + 'resource_hdx_id', + 
'admin2_ref', + 'gender', + 'age_range', + 'min_age', + 'max_age', 'population', 'reference_period_start', 'reference_period_end', - 'dataset_hdx_stub', - 'resource_hdx_id', + 'location_ref', 'location_code', 'location_name', + 'admin1_ref', 'admin1_code', 'admin1_name', 'admin2_code', 'admin2_name', ], }, - '/api/population_group': { + '/api/v1/population-social/poverty-rate': { 'query_parameters': { - 'code': 'refugees', - 'description': 'refugee', # refugees + 'admin1_name': 'Province 01', + 'mpi_min': 0.01, + 'mpi_max': 0.9, + # 'reference_period_start_min': '2020-01-01T00:00:00', + # 'reference_period_end_max': '2024-01-01T00:00:00', + 'location_code': 'fOO', + 'location_name': 'Foolandia', }, - 'expected_fields': ['code', 'description'], - }, - '/api/population_status': { - 'query_parameters': {'code': 'inneed', 'description': 'people'}, - 'expected_fields': ['code', 'description'], + 'expected_fields': [ + 'resource_hdx_id', + 'admin1_name', + 'mpi', + 'headcount_ratio', + 'intensity_of_deprivation', + 'vulnerable_to_poverty', + 'in_severe_poverty', + 'reference_period_start', + 'reference_period_end', + 'location_code', + 'location_name', + ], }, - '/api/themes/food_security': { + '/api/v1/food/food-security': { 'query_parameters': { - 'ipc_phase_code': '1', - 'ipc_type_code': 'current', - 'dataset_hdx_provider_stub': 'PROvider01', - 'resource_update_date_min': date(2023, 6, 1), - 'resource_update_date_max': date(2023, 6, 2), + 'admin2_ref': 1, + 'ipc_phase': IPCPhase.PHASE_1.value, + 'ipc_type': IPCType.CURRENT.value, + 'location_ref': 1, 'location_code': 'fOO', 'location_name': 'Foolandia', 'admin1_code': 'FOO-xxx', - 'admin1_is_unspecified': True, + # 'admin1_is_unspecified': True, 'admin2_code': 'FOO-xxx-XXX', 'admin2_name': 'Unspecified', - 'admin2_is_unspecified': True, + # 'admin2_is_unspecified': True, + 'admin_level': AdminLevel.ZERO.value, + # 'reference_period_start': date(2023, 6, 1), + # 'reference_period_end': date(2023, 6, 2), }, 'expected_fields': [ + 'resource_hdx_id', + 'admin2_ref', + 'ipc_phase', + 'ipc_type', 'population_in_phase', 'population_fraction_in_phase', - 'ipc_phase_code', - 'ipc_phase_name', - 'ipc_type_code', 'reference_period_start', 'reference_period_end', - 'dataset_hdx_stub', - 'dataset_hdx_provider_stub', + 'location_code', + 'location_name', + 'admin1_code', + 'admin1_name', + 'location_ref', + 'admin2_code', + 'admin2_name', + 'admin1_ref', + ], + }, + '/api/v1/food/food-price': { + 'query_parameters': { + 'market_code': '002', + 'market_name': 'market', + 'commodity_code': '001', + 'commodity_name': 'commodity', + 'commodity_category': CommodityCategory.VEGETABLES_FRUITS.value, + 'price_flag': PriceFlag.AGGREGATE.value, + 'price_type': PriceType.WHOLESALE.value, + 'price_min': '200.1', + 'price_max': '200.3', + 'location_ref': 1, + 'location_code': 'fOO', + 'location_name': 'Foolandia', + 'admin1_ref': 1, + 'admin1_code': 'FOO-xxx', + 'admin1_name': 'Unspecified', + 'admin2_ref': 1, + 'admin2_code': 'FOO-xxx-XXX', + 'admin2_name': 'Unspecified', + 'admin_level': AdminLevel.ZERO.value, + }, + 'expected_fields': [ 'resource_hdx_id', + 'market_code', + 'market_name', + 'commodity_code', + 'commodity_name', + 'commodity_category', + 'price_flag', + 'price_type', + 'price', + 'unit', + 'currency_code', + 'lat', + 'lon', + 'reference_period_start', + 'reference_period_end', + 'location_ref', 'location_code', 'location_name', + 'admin1_ref', 'admin1_code', 'admin1_name', + 'admin2_ref', 'admin2_code', 'admin2_name', ], }, - 
'/api/themes/national_risk': { + '/api/v1/coordination-context/national-risk': { 'query_parameters': { - 'risk_class': 5, - 'global_rank': 4, - 'overall_risk': 8.1, - 'hazard_exposure_risk': 8.7, - 'vulnerability_risk': 8.5, - 'coping_capacity_risk': 7.1, - 'dataset_hdx_provider_stub': 'pRoViDeR01', - 'resource_update_date_min': date(2023, 6, 1), - 'resource_update_date_max': date(2023, 6, 2), - # 'sector_name': 'Emergency Shelter and NFI', + 'risk_class': RiskClass.HIGH.value, + 'global_rank_min': 5, + 'global_rank_max': 7, + 'overall_risk_min': 6, + 'overall_risk_max': 10, + 'hazard_exposure_risk_min': 6, + 'hazard_exposure_risk_max': 10, + 'vulnerability_risk_min': 5, + 'vulnerability_risk_max': 10, + 'coping_capacity_risk_min': 6.1, + 'coping_capacity_risk_max': 10.1, + # 'reference_period_start_min': '2020-01-01T00:00:00', + # 'reference_period_start_max': '2024-01-11T00:00:00', + # 'reference_period_end_min': '2023-01-01T00:00:00', + # 'reference_period_end_max': '2025-01-01T00:00:00', 'location_code': 'fOO', 'location_name': 'Foolandia', }, @@ -203,99 +385,178 @@ 'meta_avg_recentness_years', 'reference_period_start', 'reference_period_end', - 'dataset_hdx_stub', - 'dataset_hdx_provider_stub', 'resource_hdx_id', - # "sector_name", 'location_code', 'location_name', ], }, - '/api/themes/humanitarian_needs': { + '/api/v1/affected-people/humanitarian-needs': { 'query_parameters': { - 'gender_code': 'm', - 'age_range_code': '0-4', - 'disabled_marker': False, + 'admin2_ref': 2, + 'gender': Gender.ALL.value, + 'age_range': 'ALL', + 'disabled_marker': DisabledMarker.YES.value, 'sector_code': 'EDU', + 'population_group': PopulationGroup.REFUGEES.value, + 'population_status': PopulationStatus.AFFECTED.value, + # 'reference_period_start_min': '2020-01-01T00:00:00', + # 'reference_period_start_max': '2026-01-01T00:00:00', 'sector_name': 'Education', - 'population_group_code': 'refugees', - 'population_status_code': 'inneed', - 'population': 100, - 'dataset_hdx_provider_stub': 'PROvider01', - 'resource_update_date_min': date(2023, 6, 1), - 'resource_update_date_max': date(2023, 6, 2), - 'location_code': 'fOO', + 'location_code': 'foo', 'location_name': 'Foolandia', - 'admin1_code': 'FOO-xxx', - 'admin1_is_unspecified': True, - 'admin2_code': 'FOO-xxx-XXX', + 'location_ref': 1, + 'admin1_code': 'FOO-001', + 'admin1_name': 'Province 01', + 'admin2_code': 'foo-001-XXX', 'admin2_name': 'Unspecified', - 'admin2_is_unspecified': True, + 'admin1_ref': 2, }, 'expected_fields': [ - 'gender_code', - 'age_range_code', + 'resource_hdx_id', + 'admin2_ref', + 'gender', + 'age_range', + 'min_age', + 'max_age', 'disabled_marker', 'sector_code', - 'population_group_code', - 'population_status_code', + 'population_group', + 'population_status', 'population', 'reference_period_start', 'reference_period_end', - 'dataset_hdx_stub', - 'dataset_hdx_provider_stub', - 'resource_hdx_id', 'sector_name', 'location_code', 'location_name', + 'location_ref', 'admin1_code', 'admin1_name', 'admin2_code', 'admin2_name', + 'admin1_ref', ], }, - '/api/resource': { + '/api/v1/affected-people/refugees': { 'query_parameters': { - 'hdx_id': '90deb235-1bf5-4bae-b231-3393222c2d01', + 'population_group': PopulationGroup.REFUGEES.value, + 'gender': Gender.ALL.value, + 'age_range': 'ALL', + # 'reference_period_start_min': '2020-01-01T00:00:00', + # 'reference_period_start_max': '2026-01-01T00:00:00', + 'origin_location_code': 'foo', + 'origin_location_name': 'Foolandia', + 'asylum_location_code': 'foo', + 'asylum_location_name': 
'Foolandia', + }, + 'expected_fields': [ + 'resource_hdx_id', + 'origin_location_ref', + 'asylum_location_ref', + 'population_group', + 'gender', + 'age_range', + 'min_age', + 'max_age', + 'population', + 'reference_period_start', + 'reference_period_end', + 'origin_location_code', + 'origin_location_name', + 'asylum_location_code', + 'asylum_location_name', + ], + }, + '/api/v1/metadata/resource': { + 'query_parameters': { + 'resource_hdx_id': '17acb541-9431-409a-80a8-50eda7e8ebab', + 'name': 'resource-01.csv', 'format': 'csv', 'update_date_min': date(2023, 6, 1), - 'update_date_max': date(2023, 6, 2), + 'update_date_max': date(2023, 6, 3), 'is_hxl': True, - 'dataset_hdx_id': 'c3f001fa-b45b-464c-9460-1ca79fd39b40', - 'dataset_title': 'Dataset #1', + 'hapi_updated_date': date(2023, 6, 2), + 'dataset_hdx_stub': 'dataset01', + 'dataset_hdx_title': 'Dataset #1', 'dataset_hdx_provider_stub': 'pRoViDeR01', 'dataset_hdx_provider_name': 'Provider #1', }, 'expected_fields': [ - 'hdx_id', + 'resource_hdx_id', + 'dataset_hdx_id', 'name', 'format', 'update_date', 'is_hxl', 'download_url', - 'dataset_hdx_id', + 'hapi_updated_date', 'dataset_hdx_stub', - 'dataset_title', + 'dataset_hdx_title', 'dataset_hdx_provider_stub', 'dataset_hdx_provider_name', 'hdx_link', # computed field 'hdx_api_link', # computed field 'dataset_hdx_link', # computed field 'dataset_hdx_api_link', # computed field + 'provider_hdx_link', # computed field + 'provider_hdx_api_link', # computed field ], }, - '/api/sector': { + '/api/v1/metadata/sector': { 'query_parameters': { 'code': 'Pro', - 'name': 'Protect', # Protection + 'name': 'Protection', # Protection + }, + 'expected_fields': ['code', 'name'], + }, + '/api/v1/metadata/currency': { + 'query_parameters': { + 'code': 'usD', }, 'expected_fields': ['code', 'name'], }, - '/api/encode_identifier': { + '/api/v1/metadata/wfp-commodity': { + 'query_parameters': { + 'code': '001', + 'name': 'commodity', + 'category': CommodityCategory.VEGETABLES_FRUITS.value, + }, + 'expected_fields': ['code', 'name', 'category'], + }, + '/api/v1/metadata/wfp-market': { + 'query_parameters': { + 'code': '001', + 'name': 'market', + 'location_ref': 1, + 'location_code': 'foo', + 'location_name': 'Foolandia', + 'admin1_ref': 2, + 'admin1_code': 'foo-001', + 'admin1_name': 'province', + 'admin2_ref': 4, + 'admin2_code': 'foo-001-a', + 'admin2_name': 'district', + }, + 'expected_fields': [ + 'code', + 'name', + 'lat', + 'lon', + 'location_ref', + 'location_code', + 'location_name', + 'admin1_ref', + 'admin1_code', + 'admin1_name', + 'admin2_ref', + 'admin2_code', + 'admin2_name', + ], + }, + '/api/encode_app_identifier': { 'query_parameters': { 'application': 'web_application_1', 'email': 'info@example.com', }, - 'expected_fields': ['encoded_identifier'], + 'expected_fields': ['encoded_app_identifier'], }, } diff --git a/tests/test_endpoints/test_admin1_endpoint.py b/tests/test_endpoints/test_admin1_endpoint.py index 5b098c09..d013e193 100644 --- a/tests/test_endpoints/test_admin1_endpoint.py +++ b/tests/test_endpoints/test_admin1_endpoint.py @@ -7,7 +7,7 @@ log = logging.getLogger(__name__) -ENDPOINT_ROUTER = '/api/admin1' +ENDPOINT_ROUTER = '/api/v1/metadata/admin1' endpoint_data = endpoint_data[ENDPOINT_ROUTER] query_parameters = endpoint_data['query_parameters'] expected_fields = endpoint_data['expected_fields'] @@ -19,7 +19,7 @@ async def test_get_admin1(event_loop, refresh_db): async with AsyncClient(app=app, base_url='http://test') as ac: response = await ac.get(ENDPOINT_ROUTER) assert 
response.status_code == 200 - response_items = response.json() + response_items = response.json()['data'] assert len(response_items) > 0, 'There should be at least one admin1 entry in the database' unspecified_list = [item for item in response_items if item['name'] == 'Unspecified'] assert len(unspecified_list) == 0, 'Unspecified admin2 entries should not be returned' @@ -34,15 +34,18 @@ async def test_get_admin1_params(event_loop, refresh_db): response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, \ - 'There should be at least one admin1 entry for parameter ' \ + assert len(response.json()['data']) > 0, ( + 'There should be at least one admin1 entry for parameter ' f'"{param_name}" with value "{param_value}" in the database' + ) async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one admin1 entry for all parameters in the database' + assert ( + len(response.json()['data']) > 0 + ), 'There should be at least one admin1 entry for all parameters in the database' @pytest.mark.asyncio @@ -53,6 +56,8 @@ async def test_get_admin1_result(event_loop, refresh_db): response = await ac.get(ENDPOINT_ROUTER) for field in expected_fields: - assert field in response.json()[0], f'Field "{field}" not found in the response' + assert field in response.json()['data'][0], f'Field "{field}" not found in the response' - assert len(response.json()[0]) == len(expected_fields), 'Response has a different number of fields than expected' + assert len(response.json()['data'][0]) == len( + expected_fields + ), 'Response has a different number of fields than expected' diff --git a/tests/test_endpoints/test_admin2_endpoint.py b/tests/test_endpoints/test_admin2_endpoint.py index 444c0247..d83db068 100644 --- a/tests/test_endpoints/test_admin2_endpoint.py +++ b/tests/test_endpoints/test_admin2_endpoint.py @@ -7,7 +7,7 @@ log = logging.getLogger(__name__) -ENDPOINT_ROUTER = '/api/admin2' +ENDPOINT_ROUTER = '/api/v1/metadata/admin2' endpoint_data = endpoint_data[ENDPOINT_ROUTER] query_parameters = endpoint_data['query_parameters'] expected_fields = endpoint_data['expected_fields'] @@ -19,7 +19,7 @@ async def test_get_admin2(event_loop, refresh_db): async with AsyncClient(app=app, base_url='http://test') as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - response_items = response.json() + response_items = response.json()['data'] assert len(response_items) > 0, 'There should be at least one admin2 entry in the database' unspecified_list = [item for item in response_items if item['name'] == 'Unspecified'] assert len(unspecified_list) == 0, 'Unspecified admin2 entries should not be returned' @@ -34,15 +34,18 @@ async def test_get_admin2_params(event_loop, refresh_db): response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, \ - 'There should be at least one admin2 entry for parameter ' \ + assert len(response.json()['data']) > 0, ( + 'There should be at least one admin2 entry for parameter ' f'"{param_name}" with value "{param_value}" in the database' + ) async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one admin2 entry for all parameters in the 
database'
+    assert (
+        len(response.json()['data']) > 0
+    ), 'There should be at least one admin2 entry for all parameters in the database'


 @pytest.mark.asyncio
@@ -53,6 +56,8 @@ async def test_get_admin2_result(event_loop, refresh_db):
         response = await ac.get(ENDPOINT_ROUTER)

     for field in expected_fields:
-        assert field in response.json()[0], f'Field "{field}" not found in the response'
+        assert field in response.json()['data'][0], f'Field "{field}" not found in the response'

-    assert len(response.json()[0]) == len(expected_fields), 'Response has a different number of fields than expected'
+    assert len(response.json()['data'][0]) == len(
+        expected_fields
+    ), 'Response has a different number of fields than expected'
diff --git a/tests/test_endpoints/test_conflict_event_endpoint.py b/tests/test_endpoints/test_conflict_event_endpoint.py
new file mode 100644
index 00000000..4e0103a0
--- /dev/null
+++ b/tests/test_endpoints/test_conflict_event_endpoint.py
@@ -0,0 +1,173 @@
+from datetime import datetime
+from hapi_schema.utils.enums import EventType
+import pytest
+import logging
+
+from httpx import AsyncClient
+from hdx_hapi.endpoints.models.conflict_event import ConflictEventResponse
+from main import app
+from tests.test_endpoints.endpoint_data import endpoint_data
+
+log = logging.getLogger(__name__)
+
+ENDPOINT_ROUTER = '/api/v1/coordination-context/conflict-event'
+endpoint_data = endpoint_data[ENDPOINT_ROUTER]
+query_parameters = endpoint_data['query_parameters']
+expected_fields = endpoint_data['expected_fields']
+
+
+@pytest.mark.asyncio
+async def test_get_conflict_events(event_loop, refresh_db):
+    log.info('started test_get_conflict_events')
+    async with AsyncClient(app=app, base_url='http://test') as ac:
+        response = await ac.get(ENDPOINT_ROUTER)
+    assert response.status_code == 200
+    assert len(response.json()['data']) > 0, 'There should be at least one conflict event entry in the database'
+
+
+@pytest.mark.asyncio
+async def test_get_conflict_event_params(event_loop, refresh_db):
+    log.info('started test_get_conflict_event_params')
+
+    for param_name, param_value in query_parameters.items():
+        async with AsyncClient(app=app, base_url='http://test', params={param_name: param_value}) as ac:
+            response = await ac.get(ENDPOINT_ROUTER)
+
+        assert response.status_code == 200
+        assert len(response.json()['data']) > 0, (
+            'There should be at least one conflict_event entry for parameter '
+            f'"{param_name}" with value "{param_value}" in the database'
+        )
+
+    async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac:
+        response = await ac.get(ENDPOINT_ROUTER)
+
+    assert response.status_code == 200
+    assert (
+        len(response.json()['data']) > 0
+    ), 'There should be at least one conflict_event entry for all parameters in the database'
+
+
+@pytest.mark.asyncio
+async def test_get_conflict_event_result(event_loop, refresh_db):
+    log.info('started test_get_conflict_event_result')
+
+    async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac:
+        response = await ac.get(ENDPOINT_ROUTER)
+
+    for field in expected_fields:
+        assert field in response.json()['data'][0], f'Field "{field}" not found in the response'
+
+    for field in response.json()['data'][0]:
+        assert field in expected_fields, f'Field "{field}" unexpected'
+
+    assert len(response.json()['data'][0]) == len(
+        expected_fields
+    ), 'Response has a different number of fields than expected'
+
+
+@pytest.mark.asyncio
+async def test_get_conflict_event_adm_fields(event_loop, refresh_db):
+    log.info('started test_get_conflict_event_adm_fields')
+
+    conflict_event_view_adm_specified = ConflictEventResponse(
+        resource_hdx_id='test-resource1',
+        event_type=EventType.CIVILIAN_TARGETING,
+        events=10,
+        fatalities=2,
+        location_ref=1,
+        location_code='FOO',
+        location_name='Foolandia',
+        admin1_ref=1,
+        admin1_is_unspecified=False,
+        admin1_code='FOO-XXX',
+        admin1_name='Province 01',
+        admin2_ref=1,
+        admin2_is_unspecified=False,
+        admin2_code='FOO-XXX-XXX',
+        admin2_name='District A',
+        reference_period_start=datetime.strptime('2023-01-01 00:00:00', '%Y-%m-%d %H:%M:%S'),
+        reference_period_end=datetime.strptime('2023-03-31 23:59:59', '%Y-%m-%d %H:%M:%S'),
+    )
+
+    assert (
+        conflict_event_view_adm_specified.admin1_code == 'FOO-XXX'
+    ), 'admin1_code should keep its value when admin1_is_unspecified is False'
+    assert (
+        conflict_event_view_adm_specified.admin1_name == 'Province 01'
+    ), 'admin1_name should keep its value when admin1_is_unspecified is False'
+    assert (
+        conflict_event_view_adm_specified.admin2_code == 'FOO-XXX-XXX'
+    ), 'admin2_code should keep its value when admin2_is_unspecified is False'
+    assert (
+        conflict_event_view_adm_specified.admin2_name == 'District A'
+    ), 'admin2_name should keep its value when admin2_is_unspecified is False'
+
+    conflict_event_view_adm_unspecified = ConflictEventResponse(
+        resource_hdx_id='test-resource1',
+        event_type=EventType.CIVILIAN_TARGETING,
+        events=10,
+        fatalities=2,
+        location_ref=1,
+        location_code='FOO',
+        location_name='Foolandia',
+        admin1_is_unspecified=True,
+        admin1_ref=1,
+        admin1_code='FOO-XXX',
+        admin1_name='Unspecified',
+        admin2_ref=1,
+        admin2_is_unspecified=True,
+        admin2_code='FOO-XXX-XXX',
+        admin2_name='Unspecified',
+        reference_period_start=datetime.strptime('2023-01-01 00:00:00', '%Y-%m-%d %H:%M:%S'),
+        reference_period_end=datetime.strptime('2023-03-31 23:59:59', '%Y-%m-%d %H:%M:%S'),
+    )
+
+    assert (
+        conflict_event_view_adm_unspecified.admin1_code is None
+    ), 'admin1_code should be changed to None when admin1_is_unspecified is True'
+    assert (
+        conflict_event_view_adm_unspecified.admin1_name is None
+    ), 'admin1_name should be changed to None when admin1_is_unspecified is True'
+    assert (
+        conflict_event_view_adm_unspecified.admin2_code is None
+    ), 'admin2_code should be changed to None when admin2_is_unspecified is True'
+    assert (
+        conflict_event_view_adm_unspecified.admin2_name is None
+    ), 'admin2_name should be changed to None when admin2_is_unspecified is True'
+
+
+@pytest.mark.asyncio
+async def test_get_conflict_event_admin_level(event_loop, refresh_db):
+    log.info('started test_get_conflict_event_admin_level')
+
+    async with AsyncClient(
+        app=app,
+        base_url='http://test',
+    ) as ac:
+        response = await ac.get(ENDPOINT_ROUTER)
+
+    assert len(response.json()['data'][0]) == len(
+        expected_fields
+    ), 'Response has a different number of fields than expected'
+
+    response_items = response.json()['data']
+    admin_0_count = len(
+        [item for item in response_items if item['admin1_name'] is None and item['admin2_name'] is None]
+    )
+    admin_1_count = len(
+        [item for item in response_items if item['admin1_name'] is not None and item['admin2_name'] is None]
+    )
+    admin_2_count = len(
+        [item for item in response_items if item['admin1_name'] is not None and item['admin2_name'] is not None]
+    )
+    counts_map = {
+        '0': admin_0_count,
+        '1': admin_1_count,
+        '2': admin_2_count,
+    }
+
+    for admin_level, count in counts_map.items():
+        async with AsyncClient(app=app, 
base_url='http://test', params={'admin_level': admin_level}) as ac: + response = await ac.get(ENDPOINT_ROUTER) + assert len(response.json()['data']) == count, f'Admin level {admin_level} should return {count} entries' diff --git a/tests/test_endpoints/test_population_status_endpoint.py b/tests/test_endpoints/test_currency_endpoint.py similarity index 52% rename from tests/test_endpoints/test_population_status_endpoint.py rename to tests/test_endpoints/test_currency_endpoint.py index 9a321724..ae07a13c 100644 --- a/tests/test_endpoints/test_population_status_endpoint.py +++ b/tests/test_endpoints/test_currency_endpoint.py @@ -7,49 +7,54 @@ log = logging.getLogger(__name__) -ENDPOINT_ROUTER = '/api/population_status' +ENDPOINT_ROUTER = '/api/v1/metadata/currency' endpoint_data = endpoint_data[ENDPOINT_ROUTER] query_parameters = endpoint_data['query_parameters'] expected_fields = endpoint_data['expected_fields'] @pytest.mark.asyncio -async def test_get_population_status(event_loop, refresh_db): - log.info('started test_get_population_status') +async def test_get_currencies(event_loop, refresh_db): + log.info('started test_get_currencies') async with AsyncClient(app=app, base_url='http://test') as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one population status in the database' + assert len(response.json()['data']) > 0, 'There should be at least one currency in the database' @pytest.mark.asyncio -async def test_get_population_status_params(event_loop, refresh_db): - log.info('started test_get_population_status_params') +async def test_get_currency_params(event_loop, refresh_db): + log.info('started test_get_currency_params') for param_name, param_value in query_parameters.items(): async with AsyncClient(app=app, base_url='http://test', params={param_name: param_value}) as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, \ - f'There should be at least one population status entry for parameter "{param_name}" ' \ - f'with value "{param_value}" in the database' + assert len(response.json()['data']) > 0, ( + f'There should be at least one currency entry for parameter "{param_name}" with value "{param_value}" ' + 'in the database' + ) async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one population status for all parameters in the database' + assert ( + len(response.json()['data']) > 0 + ), 'There should be at least one currency entry for all parameters in the database' @pytest.mark.asyncio -async def test_get_population_status_result(event_loop, refresh_db): - log.info('started test_get_population_status_result') +async def test_get_currency_result(event_loop, refresh_db): + log.info('started test_get_currency_result') async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: response = await ac.get(ENDPOINT_ROUTER) for field in expected_fields: - assert field in response.json()[0], f'Field "{field}" not found in the response' + assert field in response.json()['data'][0], f'Field "{field}" not found in the response' - assert len(response.json()[0]) == len(expected_fields), 'Response has a different number of fields than expected' + assert len(response.json()['data'][0]) == len( + expected_fields + ), 'Response has a different number of 
fields than expected' diff --git a/tests/test_endpoints/test_dataset_endpoint.py b/tests/test_endpoints/test_dataset_endpoint.py index dc8a3d70..17246d7a 100644 --- a/tests/test_endpoints/test_dataset_endpoint.py +++ b/tests/test_endpoints/test_dataset_endpoint.py @@ -7,7 +7,7 @@ log = logging.getLogger(__name__) -ENDPOINT_ROUTER = '/api/dataset' +ENDPOINT_ROUTER = '/api/v1/metadata/dataset' endpoint_data = endpoint_data[ENDPOINT_ROUTER] query_parameters = endpoint_data['query_parameters'] expected_fields = endpoint_data['expected_fields'] @@ -19,7 +19,7 @@ async def test_get_datasets(event_loop, refresh_db): async with AsyncClient(app=app, base_url='http://test') as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one dataset in the database' + assert len(response.json()['data']) > 0, 'There should be at least one dataset in the database' @pytest.mark.asyncio @@ -31,15 +31,18 @@ async def test_get_dataset_params(event_loop, refresh_db): response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, \ - 'There should be at least one dataset entry for parameter ' \ + assert len(response.json()['data']) > 0, ( + 'There should be at least one dataset entry for parameter ' f'"{param_name}" with value "{param_value}" in the database' + ) async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one dataset entry for all parameters in the database' + assert ( + len(response.json()['data']) > 0 + ), 'There should be at least one dataset entry for all parameters in the database' @pytest.mark.asyncio @@ -50,6 +53,8 @@ async def test_get_dataset_result(event_loop, refresh_db): response = await ac.get(ENDPOINT_ROUTER) for field in expected_fields: - assert field in response.json()[0], f'Field "{field}" not found in the response' + assert field in response.json()['data'][0], f'Field "{field}" not found in the response' - assert len(response.json()[0]) == len(expected_fields), 'Response has a different number of fields than expected' + assert len(response.json()['data'][0]) == len( + expected_fields + ), 'Response has a different number of fields than expected' diff --git a/tests/test_endpoints/test_encode_identifier.py b/tests/test_endpoints/test_encode_identifier.py index b4a5f277..69bb81ff 100644 --- a/tests/test_endpoints/test_encode_identifier.py +++ b/tests/test_endpoints/test_encode_identifier.py @@ -1,6 +1,7 @@ import base64 import pytest import logging +from unittest.mock import ANY from httpx import AsyncClient from main import app @@ -8,41 +9,27 @@ log = logging.getLogger(__name__) -ENDPOINT_ROUTER = '/api/encode_identifier' +ENDPOINT_ROUTER = '/api/encode_app_identifier' endpoint_data = endpoint_data[ENDPOINT_ROUTER] query_parameters = endpoint_data['query_parameters'] expected_fields = endpoint_data['expected_fields'] @pytest.mark.asyncio -async def test_get_encoded_identifier(event_loop, refresh_db): - log.info('started test_get_encoded_identifier') - async with AsyncClient(app=app, base_url='http://test') as ac: - response = await ac.get(ENDPOINT_ROUTER) - assert response.status_code == 200 - response_items = response.json() - assert len(response_items) == 1, 'One entry should be returned for encoded identifier' - - -@pytest.mark.asyncio -async def 
test_get_encoded_identifier_params(event_loop, refresh_db): - log.info('started test_get_encoded_identifier_params') - - for param_name, param_value in query_parameters.items(): - async with AsyncClient(app=app, base_url='http://test', params={param_name: param_value}) as ac: - response = await ac.get(ENDPOINT_ROUTER) - - assert response.status_code == 200 - assert len(response.json()) == 1, ( - 'There should be at one encoded_identifier entry for parameter ' - f'"{param_name}" with value "{param_value}" in the database' - ) +async def test_encoded_identifier_refuses_empty_parameters(event_loop, refresh_db): + log.info('started test_encoded_identifier_refuses_empty_parameters') - async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: + async with AsyncClient(app=app, base_url='http://test') as ac: response = await ac.get(ENDPOINT_ROUTER) - assert response.status_code == 200 - assert len(response.json()) == 1, 'There should be at one encoded_identifier entry for all parameters' + assert response.status_code == 422 + # The url key depends on the Pydantic version which we do not pin + assert response.json() == { + 'detail': [ + {'type': 'missing', 'loc': ['query', 'application'], 'msg': 'Field required', 'input': None, 'url': ANY}, + {'type': 'missing', 'loc': ['query', 'email'], 'msg': 'Field required', 'input': None, 'url': ANY}, + ] + } @pytest.mark.asyncio @@ -56,9 +43,10 @@ async def test_get_encoded_identifier_results(event_loop, refresh_db): assert field in response.json(), f'Field "{field}" not found in the response' assert len(response.json()) == len(expected_fields), 'Response has a different number of fields than expected' - assert response.json() == {'encoded_identifier': 'd2ViX2FwcGxpY2F0aW9uXzE6aW5mb0BleGFtcGxlLmNvbQ=='} + assert response.json() == {'encoded_app_identifier': 'd2ViX2FwcGxpY2F0aW9uXzE6aW5mb0BleGFtcGxlLmNvbQ=='} assert ( - base64.b64decode(response.json()['encoded_identifier']).decode('utf-8') == 'web_application_1:info@example.com' + base64.b64decode(response.json()['encoded_app_identifier']).decode('utf-8') + == 'web_application_1:info@example.com' ) diff --git a/tests/test_endpoints/test_endpoints_vs_encode_identifier.py b/tests/test_endpoints/test_endpoints_vs_encode_identifier.py new file mode 100644 index 00000000..7f8d2b57 --- /dev/null +++ b/tests/test_endpoints/test_endpoints_vs_encode_identifier.py @@ -0,0 +1,65 @@ +# import base64 +import pytest +import logging + +from httpx import AsyncClient +from main import app +# from tests.test_endpoints.endpoint_data import endpoint_data + +log = logging.getLogger(__name__) + +ENDPOINT_ROUTER_LIST = [ + '/api/v1/metadata/admin1', + '/api/v1/metadata/admin2', + '/api/v1/metadata/dataset', + '/api/v1/affected-people/humanitarian-needs', + '/api/v1/metadata/location', + '/api/v1/metadata/org', + '/api/v1/metadata/org-type', + '/api/v1/metadata/resource', + '/api/v1/metadata/sector', + '/api/v1/population-social/population', + '/api/v1/population-social/poverty-rate', + '/api/v1/coordination-context/national-risk', + '/api/v1/coordination-context/operational-presence', + '/api/v1/affected-people/refugees', + '/api/v1/coordination-context/funding', + '/api/v1/coordination-context/conflict-event', + '/api/v1/food/food-security', + '/api/v1/metadata/currency', +] + + +APP_IDENTIFIER = 'aGFwaV90ZXN0OmhhcGlAaHVtZGF0YS5vcmc=' +query_parameters = {'app_identifier': APP_IDENTIFIER} + + +@pytest.mark.asyncio +async def test_endpoints_vs_encode_identifier(event_loop, refresh_db, 
enable_hapi_identifier_filtering): + log.info('started test_endpoints_vs_encode_identifier') + + for endpoint_router in ENDPOINT_ROUTER_LIST: + async with AsyncClient(app=app, base_url='http://test') as ac: + response = await ac.get(endpoint_router) + assert response.status_code == 400 + + async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: + response = await ac.get(endpoint_router) + assert response.status_code == 200 + response_items = response.json() + assert len(response_items) > 0 + + +@pytest.mark.asyncio +async def test_encode_identifier(event_loop, refresh_db, enable_hapi_identifier_filtering): + # testing the encode identifier endpoint + endpoint_router = '/api/v1/encode_app_identifier' + + # The endpoint should return 422 whether or not app_identifier is passed, since it requires application and email + async with AsyncClient(app=app, base_url='http://test') as ac: + response = await ac.get(endpoint_router) + assert response.status_code == 422 + + async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: + response = await ac.get(endpoint_router) + assert response.status_code == 422 diff --git a/tests/test_endpoints/test_food_price_endpoint.py b/tests/test_endpoints/test_food_price_endpoint.py new file mode 100644 index 00000000..6d73013b --- /dev/null +++ b/tests/test_endpoints/test_food_price_endpoint.py @@ -0,0 +1,63 @@ +import pytest +import logging + +from httpx import AsyncClient +from main import app +from tests.test_endpoints.endpoint_data import endpoint_data + +log = logging.getLogger(__name__) + +ENDPOINT_ROUTER = '/api/v1/food/food-price' +endpoint_data = endpoint_data[ENDPOINT_ROUTER] +query_parameters = endpoint_data['query_parameters'] +expected_fields = endpoint_data['expected_fields'] + + +@pytest.mark.asyncio +async def test_get_food_prices(event_loop, refresh_db): + log.info('started test_get_food_prices') + async with AsyncClient(app=app, base_url='http://test') as ac: + response = await ac.get(ENDPOINT_ROUTER) + assert response.status_code == 200 + assert len(response.json()['data']) > 0, 'There should be at least one food_price in the database' + + +@pytest.mark.asyncio +async def test_get_food_price_params(event_loop, refresh_db): + log.info('started test_get_food_price_params') + + for param_name, param_value in query_parameters.items(): + async with AsyncClient(app=app, base_url='http://test', params={param_name: param_value}) as ac: + response = await ac.get(ENDPOINT_ROUTER) + + assert response.status_code == 200 + assert len(response.json()['data']) > 0, ( + f'There should be at least one food_price entry for parameter "{param_name}" with value "{param_value}" ' + 'in the database' + ) + + async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: + response = await ac.get(ENDPOINT_ROUTER) + + assert response.status_code == 200 + assert ( + len(response.json()['data']) > 0 + ), 'There should be at least one food_price entry for all parameters in the database' + + +@pytest.mark.asyncio +async def test_get_food_price_result(event_loop, refresh_db): + log.info('started test_get_food_price_result') + + async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: + response = await ac.get(ENDPOINT_ROUTER) + + for field in expected_fields: + assert field in response.json()['data'][0], f'Field "{field}" not found in the response' + + for field in response.json()['data'][0]: + assert field in expected_fields, f'Field "{field}" unexpected' + + assert 
len(response.json()['data'][0]) == len( + expected_fields + ), 'Response has a different number of fields than expected' diff --git a/tests/test_endpoints/test_food_security_endpoint.py b/tests/test_endpoints/test_food_security_endpoint.py index 69b062a6..5925610c 100644 --- a/tests/test_endpoints/test_food_security_endpoint.py +++ b/tests/test_endpoints/test_food_security_endpoint.py @@ -8,7 +8,7 @@ log = logging.getLogger(__name__) -ENDPOINT_ROUTER = '/api/themes/food_security' +ENDPOINT_ROUTER = '/api/v1/food/food-security' endpoint_data = endpoint_data[ENDPOINT_ROUTER] query_parameters = endpoint_data['query_parameters'] expected_fields = endpoint_data['expected_fields'] @@ -20,7 +20,7 @@ async def test_get_food_security(event_loop, refresh_db): async with AsyncClient(app=app, base_url='http://test') as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one food security entry in the database' + assert len(response.json()['data']) > 0, 'There should be at least one food security entry in the database' @pytest.mark.asyncio @@ -28,19 +28,23 @@ async def test_get_food_security_params(event_loop, refresh_db): log.info('started test_get_food_security_params') for param_name, param_value in query_parameters.items(): + log.info(f'{param_name}:{param_value}') async with AsyncClient(app=app, base_url='http://test', params={param_name: param_value}) as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, \ - f'There should be at least one food_security entry for parameter ' \ + assert len(response.json()['data']) > 0, ( + f'There should be at least one food_security entry for parameter ' f'"{param_name}" with value "{param_value}" in the database' + ) async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one food_security entry for all parameters in the db' + assert ( + len(response.json()['data']) > 0 + ), 'There should be at least one food_security entry for all parameters in the db' @pytest.mark.asyncio @@ -51,9 +55,11 @@ async def test_get_food_security_result(event_loop, refresh_db): response = await ac.get(ENDPOINT_ROUTER) for field in expected_fields: - assert field in response.json()[0], f'Field "{field}" not found in the response' + assert field in response.json()['data'][0], f'Field "{field}" not found in the response' - assert len(response.json()[0]) == len(expected_fields), 'Response has a different number of fields than expected' + assert len(response.json()['data'][0]) == len( + expected_fields + ), 'Response has a different number of fields than expected' @pytest.mark.asyncio @@ -63,44 +69,47 @@ async def test_get_food_security_adm_fields(event_loop, refresh_db): food_security_view_adm_specified = FoodSecurityResponse( population_in_phase=8225, population_fraction_in_phase=0.02, - ipc_phase_code='2', - ipc_phase_name='Phase 2: Stressed', - ipc_type_code='current', - dataset_hdx_provider_stub='provider01', - dataset_hdx_stub='test-dataset1', + ipc_phase='2', + ipc_type='current', resource_hdx_id='test-resource1', + location_ref=1, location_code='Foolandia', location_name='FOO-XXX', + admin1_ref=1, admin1_is_unspecified=False, + admin2_ref=1, admin1_code='FOO-XXX', admin1_name='Province 01', admin2_is_unspecified=False, admin2_code='FOO-XXX-XXX', 
admin2_name='District A', reference_period_start='2023-01-01 00:00:00', - reference_period_end='2023-03-31 23:59:59' + reference_period_end='2023-03-31 23:59:59', ) - assert food_security_view_adm_specified.admin1_code == 'FOO-XXX', \ - 'admin1_code should keep its value when admin1_is_unspecified is False' - assert food_security_view_adm_specified.admin1_name == 'Province 01', \ - 'admin1_name should keep its value when admin1_is_unspecified is False' - assert food_security_view_adm_specified.admin2_code == 'FOO-XXX-XXX', \ - 'admin2_code should keep its value when admin1_is_unspecified is False' - assert food_security_view_adm_specified.admin2_name == 'District A', \ - 'admin2_name should keep its value when admin1_is_unspecified is False' + assert ( + food_security_view_adm_specified.admin1_code == 'FOO-XXX' + ), 'admin1_code should keep its value when admin1_is_unspecified is False' + assert ( + food_security_view_adm_specified.admin1_name == 'Province 01' + ), 'admin1_name should keep its value when admin1_is_unspecified is False' + assert ( + food_security_view_adm_specified.admin2_code == 'FOO-XXX-XXX' + ), 'admin2_code should keep its value when admin1_is_unspecified is False' + assert ( + food_security_view_adm_specified.admin2_name == 'District A' + ), 'admin2_name should keep its value when admin1_is_unspecified is False' food_security_view_adm_unspecified = FoodSecurityResponse( population_in_phase=8225, population_fraction_in_phase=0.02, - ipc_phase_code='2', - ipc_phase_name='Phase 2: Stressed', - ipc_type_code='current', - dataset_hdx_stub='test-dataset1', - dataset_hdx_provider_stub='provider01', + ipc_phase='2', + ipc_type='current', resource_hdx_id='test-resource1', + location_ref=1, location_code='Foolandia', location_name='FOO-XXX', + admin1_ref=1, admin1_is_unspecified=True, admin1_code='FOO-XXX', admin1_name='Unpecified', @@ -108,29 +117,38 @@ async def test_get_food_security_adm_fields(event_loop, refresh_db): admin2_code='FOO-XXX', admin2_name='Unspecified', reference_period_start='2023-01-01 00:00:00', - reference_period_end='2023-03-31 23:59:59' + reference_period_end='2023-03-31 23:59:59', ) - assert food_security_view_adm_unspecified.admin1_code is None, \ - 'admin1_code should be changed to None when admin1_is_unspecified is True' - assert food_security_view_adm_unspecified.admin1_name is None, \ - 'admin1_name should be changed to None when admin1_is_unspecified is True' - assert food_security_view_adm_unspecified.admin2_code is None, \ - 'admin2_code should be changed to None when admin1_is_unspecified is True' - assert food_security_view_adm_unspecified.admin2_name is None, \ - 'admin2_name should be changed to None when admin1_is_unspecified is True' + assert ( + food_security_view_adm_unspecified.admin1_code is None + ), 'admin1_code should be changed to None when admin1_is_unspecified is True' + assert ( + food_security_view_adm_unspecified.admin1_name is None + ), 'admin1_name should be changed to None when admin1_is_unspecified is True' + assert ( + food_security_view_adm_unspecified.admin2_code is None + ), 'admin2_code should be changed to None when admin1_is_unspecified is True' + assert ( + food_security_view_adm_unspecified.admin2_name is None + ), 'admin2_name should be changed to None when admin1_is_unspecified is True' @pytest.mark.asyncio async def test_get_food_security_admin_level(event_loop, refresh_db): log.info('started test_get_food_security_admin_level') - async with AsyncClient(app=app, base_url='http://test', ) as ac: + async with 
AsyncClient( + app=app, + base_url='http://test', + ) as ac: response = await ac.get(ENDPOINT_ROUTER) - assert len(response.json()[0]) == len(expected_fields), 'Response has a different number of fields than expected' + assert len(response.json()['data'][0]) == len( + expected_fields + ), 'Response has a different number of fields than expected' - response_items = response.json() + response_items = response.json()['data'] admin_0_count = len( [item for item in response_items if item['admin1_name'] is None and item['admin2_name'] is None] ) @@ -149,4 +167,4 @@ async def test_get_food_security_admin_level(event_loop, refresh_db): for admin_level, count in counts_map.items(): async with AsyncClient(app=app, base_url='http://test', params={'admin_level': admin_level}) as ac: response = await ac.get(ENDPOINT_ROUTER) - assert len(response.json()) == count, f'Admin level {admin_level} should return {count} entries' + assert len(response.json()['data']) == count, f'Admin level {admin_level} should return {count} entries' diff --git a/tests/test_endpoints/test_age_range_endpoint.py b/tests/test_endpoints/test_funding_endpoint.py similarity index 53% rename from tests/test_endpoints/test_age_range_endpoint.py rename to tests/test_endpoints/test_funding_endpoint.py index cf03c323..a08684c8 100644 --- a/tests/test_endpoints/test_age_range_endpoint.py +++ b/tests/test_endpoints/test_funding_endpoint.py @@ -7,49 +7,57 @@ log = logging.getLogger(__name__) -ENDPOINT_ROUTER = '/api/age_range' +ENDPOINT_ROUTER = '/api/v1/coordination-context/funding' endpoint_data = endpoint_data[ENDPOINT_ROUTER] query_parameters = endpoint_data['query_parameters'] expected_fields = endpoint_data['expected_fields'] @pytest.mark.asyncio -async def test_get_age_ranges(event_loop, refresh_db): - log.info('started test_get_age_ranges') +async def test_get_fundings(event_loop, refresh_db): + log.info('started test_get_fundings') async with AsyncClient(app=app, base_url='http://test') as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one age range in the database' + assert len(response.json()['data']) > 0, 'There should be at least one funding in the database' @pytest.mark.asyncio -async def test_get_age_range_params(event_loop, refresh_db): - log.info('started test_get_age_range_params') +async def test_get_funding_params(event_loop, refresh_db): + log.info('started test_get_funding_params') for param_name, param_value in query_parameters.items(): async with AsyncClient(app=app, base_url='http://test', params={param_name: param_value}) as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, \ - 'There should be at least one age_range entry for parameter ' \ + assert len(response.json()['data']) > 0, ( + 'There should be at least one funding entry for parameter ' f'"{param_name}" with value "{param_value}" in the database' + ) async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one age_range entry for all parameters in the database' + assert ( + len(response.json()['data']) > 0 + ), 'There should be at least one funding entry for all parameters in the database' @pytest.mark.asyncio -async def test_get_age_range_result(event_loop, refresh_db): - log.info('started test_get_age_range_result') +async def 
test_get_funding_result(event_loop, refresh_db): + log.info('started test_get_funding_result') async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: response = await ac.get(ENDPOINT_ROUTER) for field in expected_fields: - assert field in response.json()[0], f'Field "{field}" not found in the response' + assert field in response.json()['data'][0], f'Field "{field}" not found in the response' - assert len(response.json()[0]) == len(expected_fields), 'Response has a different number of fields than expected' + for field in response.json()['data'][0]: + assert field in expected_fields, f'Field "{field}" unexpected' + + assert len(response.json()['data'][0]) == len( + expected_fields + ), 'Response has a different number of fields than expected' diff --git a/tests/test_endpoints/test_humanitarian_needs_endpoint.py b/tests/test_endpoints/test_humanitarian_needs_endpoint.py index b31199f9..582c4339 100644 --- a/tests/test_endpoints/test_humanitarian_needs_endpoint.py +++ b/tests/test_endpoints/test_humanitarian_needs_endpoint.py @@ -8,7 +8,7 @@ log = logging.getLogger(__name__) -ENDPOINT_ROUTER = '/api/themes/humanitarian_needs' +ENDPOINT_ROUTER = '/api/v1/affected-people/humanitarian-needs' endpoint_data = endpoint_data[ENDPOINT_ROUTER] query_parameters = endpoint_data['query_parameters'] expected_fields = endpoint_data['expected_fields'] @@ -20,7 +20,7 @@ async def test_get_humanitarian_needs(event_loop, refresh_db): async with AsyncClient(app=app, base_url='http://test') as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one food security entry in the database' + assert len(response.json()['data']) > 0, 'There should be at least one humanitarian needs entry in the database' @pytest.mark.asyncio @@ -32,16 +32,18 @@ async def test_get_humanitarian_needs_params(event_loop, refresh_db): response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, \ - 'There should be at least one humanitarian_needs entry for parameter ' \ + assert len(response.json()['data']) > 0, ( + 'There should be at least one humanitarian_needs entry for parameter ' f'"{param_name}" with value "{param_value}" in the database' + ) async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, \ - 'There should be at least one humanitarian_needs entry for all parameters in the database' + assert ( + len(response.json()['data']) > 0 + ), 'There should be at least one humanitarian_needs entry for all parameters in the database' @pytest.mark.asyncio @@ -52,9 +54,11 @@ async def test_get_humanitarian_needs_result(event_loop, refresh_db): response = await ac.get(ENDPOINT_ROUTER) for field in expected_fields: - assert field in response.json()[0], f'Field "{field}" not found in the response' + assert field in response.json()['data'][0], f'Field "{field}" not found in the response' - assert len(response.json()[0]) == len(expected_fields), 'Response has a different number of fields than expected' + assert len(response.json()['data'][0]) == len( + expected_fields + ), 'Response has a different number of fields than expected' @pytest.mark.asyncio @@ -62,78 +66,103 @@ async def test_get_humanitarian_needs_adm_fields(event_loop, refresh_db): log.info('started test_get_humanitarian_needs_adm_fields') humanitarian_needs_view_adm_specified = 
HumanitarianNeedsResponse( - gender_code='m', - age_range_code='0-1', - sector_code='ABC', - sector_name='Sector Name', - population_status_code='inneed', - population_group_code='abcd', - dataset_hdx_provider_stub='provider01', - dataset_hdx_stub='test-dataset1', - resource_hdx_id='test-resource1', + resource_hdx_id='17acb541-9431-409a-80a8-50eda7e8ebab', + gender='f', + age_range='1-2', + min_age=1, + max_age=2, + disabled_marker='y', + sector_code='EDU', + population_group='REF', + population_status='AFF', + population=500000, + reference_period_start='2023-01-01 00:00:00', + reference_period_end='2023-03-31 23:59:59', + sector_name='Education', location_code='Foolandia', location_name='FOO-XXX', admin1_is_unspecified=False, + admin1_ref=1, + admin2_ref=1, admin1_code='FOO-XXX', admin1_name='Province 01', admin2_is_unspecified=False, admin2_code='FOO-XXX-XXX', admin2_name='District A', - reference_period_start='2023-01-01 00:00:00', - reference_period_end='2023-03-31 23:59:59' + location_ref=2, ) - assert humanitarian_needs_view_adm_specified.admin1_code == 'FOO-XXX', \ - 'admin1_code should keep its value when admin1_is_unspecified is False' - assert humanitarian_needs_view_adm_specified.admin1_name == 'Province 01', \ - 'admin1_name should keep its value when admin1_is_unspecified is False' - assert humanitarian_needs_view_adm_specified.admin2_code == 'FOO-XXX-XXX', \ - 'admin2_code should keep its value when admin1_is_unspecified is False' - assert humanitarian_needs_view_adm_specified.admin2_name == 'District A', \ - 'admin2_name should keep its value when admin1_is_unspecified is False' + assert ( + humanitarian_needs_view_adm_specified.admin1_code == 'FOO-XXX' + ), 'admin1_code should keep its value when admin1_is_unspecified is False' + assert ( + humanitarian_needs_view_adm_specified.admin1_name == 'Province 01' + ), 'admin1_name should keep its value when admin1_is_unspecified is False' + assert ( + humanitarian_needs_view_adm_specified.admin2_code == 'FOO-XXX-XXX' + ), 'admin2_code should keep its value when admin1_is_unspecified is False' + assert ( + humanitarian_needs_view_adm_specified.admin2_name == 'District A' + ), 'admin2_name should keep its value when admin1_is_unspecified is False' humanitarian_needs_view_adm_unspecified = HumanitarianNeedsResponse( - gender_code='f', - age_range_code='1-2', - sector_code='DEF', - sector_name='Sector_name2', - population_status_code='inneed', - population_group_code='efgh', - dataset_hdx_stub='test-dataset2', - dataset_hdx_provider_stub='provider02', - resource_hdx_id='test-resource1', + resource_hdx_id='17acb541-9431-409a-80a8-50eda7e8ebab', + gender='f', + age_range='1-2', + min_age=1, + max_age=2, + disabled_marker='y', + sector_code='EDU', + population_group='REF', + population_status='AFF', + population=500000, + reference_period_start='2023-01-01 00:00:00', + reference_period_end='2023-03-31 23:59:59', + sector_name='Education', location_code='Foolandia', location_name='FOO-XXX', admin1_is_unspecified=True, + admin1_ref=1, + admin2_ref=1, admin1_code='FOO-XXX', - admin1_name='Unpecified', + admin1_name='Unspecified', admin2_is_unspecified=True, admin2_code='FOO-XXX', admin2_name='Unspecified', - reference_period_start='2023-01-01 00:00:00', - reference_period_end='2023-03-31 23:59:59' + location_ref=2, ) - assert humanitarian_needs_view_adm_unspecified.admin1_code is None, \ - 'admin1_code should be changed to None when admin1_is_unspecified is True' - assert humanitarian_needs_view_adm_unspecified.admin1_name 
is None, \ - 'admin1_name should be changed to None when admin1_is_unspecified is True' - assert humanitarian_needs_view_adm_unspecified.admin2_code is None, \ - 'admin2_code should be changed to None when admin1_is_unspecified is True' - assert humanitarian_needs_view_adm_unspecified.admin2_name is None, \ - 'admin2_name should be changed to None when admin1_is_unspecified is True' + assert ( + humanitarian_needs_view_adm_unspecified.admin1_code is None + ), 'admin1_code should be changed to None when admin1_is_unspecified is True' + assert ( + humanitarian_needs_view_adm_unspecified.admin1_name is None + ), 'admin1_name should be changed to None when admin1_is_unspecified is True' + assert ( + humanitarian_needs_view_adm_unspecified.admin2_code is None + ), 'admin2_code should be changed to None when admin1_is_unspecified is True' + assert ( + humanitarian_needs_view_adm_unspecified.admin2_name is None + ), 'admin2_name should be changed to None when admin1_is_unspecified is True' @pytest.mark.asyncio async def test_get_humanitarian_needs_admin_level(event_loop, refresh_db): log.info('started test_get_humanitarian_needs_admin_level') - async with AsyncClient(app=app, base_url='http://test', ) as ac: + async with AsyncClient( + app=app, + base_url='http://test', + ) as ac: response = await ac.get(ENDPOINT_ROUTER) - assert len(response.json()[0]) == len(expected_fields), 'Response has a different number of fields than expected' + assert len(response.json()['data'][0]) == len( + expected_fields + ), 'Response has a different number of fields than expected' - response_items = response.json() + response_items = response.json()['data'] admin_0_count = len( [item for item in response_items if item['admin1_name'] is None and item['admin2_name'] is None] ) @@ -152,4 +181,4 @@ async def test_get_humanitarian_needs_admin_level(event_loop, refresh_db): for admin_level, count in counts_map.items(): async with AsyncClient(app=app, base_url='http://test', params={'admin_level': admin_level}) as ac: response = await ac.get(ENDPOINT_ROUTER) - assert len(response.json()) == count, f'Admin level {admin_level} should return {count} entries' \ No newline at end of file + assert len(response.json()['data']) == count, f'Admin level {admin_level} should return {count} entries' diff --git a/tests/test_endpoints/test_location_endpoint.py b/tests/test_endpoints/test_location_endpoint.py index dc9cb839..03880ad4 100644 --- a/tests/test_endpoints/test_location_endpoint.py +++ b/tests/test_endpoints/test_location_endpoint.py @@ -7,7 +7,7 @@ log = logging.getLogger(__name__) -ENDPOINT_ROUTER = '/api/location' +ENDPOINT_ROUTER = '/api/v1/metadata/location' endpoint_data = endpoint_data[ENDPOINT_ROUTER] query_parameters = endpoint_data['query_parameters'] expected_fields = endpoint_data['expected_fields'] @@ -19,7 +19,7 @@ async def test_get_locations(event_loop, refresh_db): async with AsyncClient(app=app, base_url='http://test') as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one location in the database' + assert len(response.json()['data']) > 0, 'There should be at least one location in the database' @pytest.mark.asyncio @@ -31,15 +31,18 @@ async def test_get_location_params(event_loop, refresh_db): response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, \ - 'There should be at least one location entry for parameter ' \ - f'"{param_name}" with value "{param_value}" in 
the database' + assert len(response.json()['data']) > 0, ( + 'There should be at least one location entry for parameter ' + f'"{param_name}" with value "{param_value}" in the database' + ) async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one location entry for all parameters in the database' + assert ( + len(response.json()['data']) > 0 + ), 'There should be at least one location entry for all parameters in the database' @pytest.mark.asyncio @@ -50,6 +53,8 @@ async def test_get_location_result(event_loop, refresh_db): response = await ac.get(ENDPOINT_ROUTER) for field in expected_fields: - assert field in response.json()[0], f'Field "{field}" not found in the response' + assert field in response.json()['data'][0], f'Field "{field}" not found in the response' - assert len(response.json()[0]) == len(expected_fields), 'Response has a different number of fields than expected' + assert len(response.json()['data'][0]) == len( + expected_fields + ), 'Response has a different number of fields than expected' diff --git a/tests/test_endpoints/test_national_risk_endpoint.py b/tests/test_endpoints/test_national_risk_endpoint.py index 93137111..fd3eda60 100644 --- a/tests/test_endpoints/test_national_risk_endpoint.py +++ b/tests/test_endpoints/test_national_risk_endpoint.py @@ -7,7 +7,7 @@ log = logging.getLogger(__name__) -ENDPOINT_ROUTER = '/api/themes/national_risk' +ENDPOINT_ROUTER = '/api/v1/coordination-context/national-risk' endpoint_data = endpoint_data[ENDPOINT_ROUTER] query_parameters = endpoint_data['query_parameters'] expected_fields = endpoint_data['expected_fields'] @@ -19,7 +19,7 @@ async def test_get_national_risks(event_loop, refresh_db): async with AsyncClient(app=app, base_url='http://test') as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one national risk entry in the database' + assert len(response.json()['data']) > 0, 'There should be at least one national risk entry in the database' @pytest.mark.asyncio @@ -31,15 +31,18 @@ async def test_get_national_risk_params(event_loop, refresh_db): response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, \ - 'There should be at least one national risk entry for parameter ' \ + assert len(response.json()['data']) > 0, ( + 'There should be at least one national risk entry for parameter ' f'"{param_name}" with value "{param_value}" in the database' + ) async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one national risk entry for all parameters in the db' + assert ( + len(response.json()['data']) > 0 + ), 'There should be at least one national risk entry for all parameters in the db' @pytest.mark.asyncio @@ -50,6 +53,8 @@ async def test_get_national_risk_result(event_loop, refresh_db): response = await ac.get(ENDPOINT_ROUTER) for field in expected_fields: - assert field in response.json()[0], f'Field "{field}" not found in the response' + assert field in response.json()['data'][0], f'Field "{field}" not found in the response' - assert len(response.json()[0]) == len(expected_fields), 'Response has a different number of fields than expected' + assert 
len(response.json()['data'][0]) == len( + expected_fields + ), 'Response has a different number of fields than expected' diff --git a/tests/test_endpoints/test_operational_presence_endpoint.py b/tests/test_endpoints/test_operational_presence_endpoint.py index 9aec4cf4..cbfc1411 100644 --- a/tests/test_endpoints/test_operational_presence_endpoint.py +++ b/tests/test_endpoints/test_operational_presence_endpoint.py @@ -1,3 +1,4 @@ +from datetime import datetime import pytest import logging @@ -8,7 +9,7 @@ log = logging.getLogger(__name__) -ENDPOINT_ROUTER = '/api/themes/3W' +ENDPOINT_ROUTER = '/api/v1/coordination-context/operational-presence' endpoint_data = endpoint_data[ENDPOINT_ROUTER] query_parameters = endpoint_data['query_parameters'] expected_fields = endpoint_data['expected_fields'] @@ -20,7 +21,7 @@ async def test_get_operational_presences(event_loop, refresh_db): async with AsyncClient(app=app, base_url='http://test') as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one operational presence in the database' + assert len(response.json()['data']) > 0, 'There should be at least one operational presence in the database' @pytest.mark.asyncio @@ -32,16 +33,18 @@ async def test_get_operational_presence_params(event_loop, refresh_db): response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, \ - 'There should be at least one operational_presence entry for parameter ' \ + assert len(response.json()['data']) > 0, ( + 'There should be at least one operational_presence entry for parameter ' f'"{param_name}" with value "{param_value}" in the database' + ) async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, \ - 'There should be at least one operational_presence entry for all parameters in the database' + assert ( + len(response.json()['data']) > 0 + ), 'There should be at least one operational_presence entry for all parameters in the database' @pytest.mark.asyncio @@ -52,9 +55,11 @@ async def test_get_operational_presence_result(event_loop, refresh_db): response = await ac.get(ENDPOINT_ROUTER) for field in expected_fields: - assert field in response.json()[0], f'Field "{field}" not found in the response' + assert field in response.json()['data'][0], f'Field "{field}" not found in the response' - assert len(response.json()[0]) == len(expected_fields), 'Response has a different number of fields than expected' + assert len(response.json()['data'][0]) == len( + expected_fields + ), 'Response has a different number of fields than expected' @pytest.mark.asyncio @@ -63,73 +68,92 @@ async def test_get_operational_presence_adm_fields(event_loop, refresh_db): operational_presence_view_adm_specified = OperationalPresenceResponse( sector_code='ABC', - age_range_code='0-1', - dataset_hdx_stub='test-dataset1', resource_hdx_id='test-resource1', org_acronym='ORG01', org_name='Organisation 1', + org_type_code='unimportant', + org_type_description='Unimportant', sector_name='Sector Name', + location_ref=1, location_code='Foolandia', location_name='FOO-XXX', + admin1_ref=1, admin1_is_unspecified=False, admin1_code='FOO-XXX', admin1_name='Province 01', + admin2_ref=1, admin2_is_unspecified=False, admin2_code='FOO-XXX-XXX', admin2_name='District A', - reference_period_start='2023-01-01 00:00:00', - 
reference_period_end='2023-03-31 23:59:59' + reference_period_start=datetime.strptime('2023-01-01 00:00:00', '%Y-%m-%d %H:%M:%S'), + reference_period_end=datetime.strptime('2023-03-31 23:59:59', '%Y-%m-%d %H:%M:%S'), ) - assert operational_presence_view_adm_specified.admin1_code == 'FOO-XXX', \ - 'admin1_code should keep its value when admin1_is_unspecified is False' - assert operational_presence_view_adm_specified.admin1_name == 'Province 01', \ - 'admin1_name should keep its value when admin1_is_unspecified is False' - assert operational_presence_view_adm_specified.admin2_code == 'FOO-XXX-XXX', \ - 'admin2_code should keep its value when admin1_is_unspecified is False' - assert operational_presence_view_adm_specified.admin2_name == 'District A', \ - 'admin2_name should keep its value when admin1_is_unspecified is False' + assert ( + operational_presence_view_adm_specified.admin1_code == 'FOO-XXX' + ), 'admin1_code should keep its value when admin1_is_unspecified is False' + assert ( + operational_presence_view_adm_specified.admin1_name == 'Province 01' + ), 'admin1_name should keep its value when admin1_is_unspecified is False' + assert ( + operational_presence_view_adm_specified.admin2_code == 'FOO-XXX-XXX' + ), 'admin2_code should keep its value when admin1_is_unspecified is False' + assert ( + operational_presence_view_adm_specified.admin2_name == 'District A' + ), 'admin2_name should keep its value when admin1_is_unspecified is False' operational_presence_view_adm_unspecified = OperationalPresenceResponse( sector_code='ABC', - age_range_code='0-1', - dataset_hdx_stub='test-dataset1', resource_hdx_id='test-resource1', org_acronym='ORG01', org_name='Organisation 1', + org_type_code='unimportant', + org_type_description='Unimportant', sector_name='Sector Name', + location_ref=1, location_code='Foolandia', location_name='FOO-XXX', admin1_is_unspecified=True, + admin1_ref=1, admin1_code='FOO-XXX', admin1_name='Unpecified', + admin2_ref=1, admin2_is_unspecified=True, - admin2_code='FOO-XXX', + admin2_code='FOO-XXX-XXX', admin2_name='Unspecified', - reference_period_start='2023-01-01 00:00:00', - reference_period_end='2023-03-31 23:59:59' + reference_period_start=datetime.strptime('2023-01-01 00:00:00', '%Y-%m-%d %H:%M:%S'), + reference_period_end=datetime.strptime('2023-03-31 23:59:59', '%Y-%m-%d %H:%M:%S'), ) - assert operational_presence_view_adm_unspecified.admin1_code is None, \ - 'admin1_code should be changed to None when admin1_is_unspecified is True' - assert operational_presence_view_adm_unspecified.admin1_name is None, \ - 'admin1_name should be changed to None when admin1_is_unspecified is True' - assert operational_presence_view_adm_unspecified.admin2_code is None, \ - 'admin2_code should be changed to None when admin1_is_unspecified is True' - assert operational_presence_view_adm_unspecified.admin2_name is None, \ - 'admin2_name should be changed to None when admin1_is_unspecified is True' + assert ( + operational_presence_view_adm_unspecified.admin1_code is None + ), 'admin1_code should be changed to None when admin1_is_unspecified is True' + assert ( + operational_presence_view_adm_unspecified.admin1_name is None + ), 'admin1_name should be changed to None when admin1_is_unspecified is True' + assert ( + operational_presence_view_adm_unspecified.admin2_code is None + ), 'admin2_code should be changed to None when admin1_is_unspecified is True' + assert ( + operational_presence_view_adm_unspecified.admin2_name is None + ), 'admin2_name should be changed to None when 
admin1_is_unspecified is True' @pytest.mark.asyncio async def test_get_operational_presence_admin_level(event_loop, refresh_db): log.info('started test_get_operational_presence_admin_level') - async with AsyncClient(app=app, base_url='http://test', ) as ac: + async with AsyncClient( + app=app, + base_url='http://test', + ) as ac: response = await ac.get(ENDPOINT_ROUTER) - assert len(response.json()[0]) == len(expected_fields), 'Response has a different number of fields than expected' + assert len(response.json()['data'][0]) == len( + expected_fields + ), 'Response has a different number of fields than expected' - response_items = response.json() + response_items = response.json()['data'] admin_0_count = len( [item for item in response_items if item['admin1_name'] is None and item['admin2_name'] is None] ) @@ -148,4 +172,4 @@ async def test_get_operational_presence_admin_level(event_loop, refresh_db): for admin_level, count in counts_map.items(): async with AsyncClient(app=app, base_url='http://test', params={'admin_level': admin_level}) as ac: response = await ac.get(ENDPOINT_ROUTER) - assert len(response.json()) == count, f'Admin level {admin_level} should return {count} entries' \ No newline at end of file + assert len(response.json()['data']) == count, f'Admin level {admin_level} should return {count} entries' diff --git a/tests/test_endpoints/test_org_endpoint.py b/tests/test_endpoints/test_org_endpoint.py index e6d19869..31fa73a8 100644 --- a/tests/test_endpoints/test_org_endpoint.py +++ b/tests/test_endpoints/test_org_endpoint.py @@ -7,7 +7,7 @@ log = logging.getLogger(__name__) -ENDPOINT_ROUTER = '/api/org' +ENDPOINT_ROUTER = '/api/v1/metadata/org' endpoint_data = endpoint_data[ENDPOINT_ROUTER] query_parameters = endpoint_data['query_parameters'] expected_fields = endpoint_data['expected_fields'] @@ -19,7 +19,7 @@ async def test_get_orgs(event_loop, refresh_db): async with AsyncClient(app=app, base_url='http://test') as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one org in the database' + assert len(response.json()['data']) > 0, 'There should be at least one org in the database' @pytest.mark.asyncio @@ -31,15 +31,16 @@ async def test_get_org_params(event_loop, refresh_db): response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, \ - f'There should be at least one org entry for parameter "{param_name}" with value "{param_value}" ' \ + assert len(response.json()['data']) > 0, ( + f'There should be at least one org entry for parameter "{param_name}" with value "{param_value}" ' 'in the database' + ) async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one org entry for all parameters in the database' + assert len(response.json()['data']) > 0, 'There should be at least one org entry for all parameters in the database' @pytest.mark.asyncio @@ -50,6 +51,8 @@ async def test_get_org_result(event_loop, refresh_db): response = await ac.get(ENDPOINT_ROUTER) for field in expected_fields: - assert field in response.json()[0], f'Field "{field}" not found in the response' + assert field in response.json()['data'][0], f'Field "{field}" not found in the response' - assert len(response.json()[0]) == len(expected_fields), 'Response has a different number of fields than expected' + 
assert len(response.json()['data'][0]) == len( + expected_fields + ), 'Response has a different number of fields than expected' diff --git a/tests/test_endpoints/test_org_type_endpoint.py b/tests/test_endpoints/test_org_type_endpoint.py index 06358163..05992f15 100644 --- a/tests/test_endpoints/test_org_type_endpoint.py +++ b/tests/test_endpoints/test_org_type_endpoint.py @@ -7,7 +7,7 @@ log = logging.getLogger(__name__) -ENDPOINT_ROUTER = '/api/org_type' +ENDPOINT_ROUTER = '/api/v1/metadata/org-type' endpoint_data = endpoint_data[ENDPOINT_ROUTER] query_parameters = endpoint_data['query_parameters'] expected_fields = endpoint_data['expected_fields'] @@ -19,7 +19,7 @@ async def test_get_org_types(event_loop, refresh_db): async with AsyncClient(app=app, base_url='http://test') as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one org type in the database' + assert len(response.json()['data']) > 0, 'There should be at least one org type in the database' @pytest.mark.asyncio @@ -31,15 +31,18 @@ async def test_get_org_type_params(event_loop, refresh_db): response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, \ - f'There should be at least one org_type entry for parameter "{param_name}" ' \ + assert len(response.json()['data']) > 0, ( + f'There should be at least one org_type entry for parameter "{param_name}" ' f'with value "{param_value}" in the database' + ) async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one org_type entry for all parameters in the database' + assert ( + len(response.json()['data']) > 0 + ), 'There should be at least one org_type entry for all parameters in the database' @pytest.mark.asyncio @@ -50,6 +53,8 @@ async def test_get_org_type_result(event_loop, refresh_db): response = await ac.get(ENDPOINT_ROUTER) for field in expected_fields: - assert field in response.json()[0], f'Field "{field}" not found in the response' + assert field in response.json()['data'][0], f'Field "{field}" not found in the response' - assert len(response.json()[0]) == len(expected_fields), 'Response has a different number of fields than expected' + assert len(response.json()['data'][0]) == len( + expected_fields + ), 'Response has a different number of fields than expected' diff --git a/tests/test_endpoints/test_output_format.py b/tests/test_endpoints/test_output_format.py index 25ef2c1a..b3f7f663 100644 --- a/tests/test_endpoints/test_output_format.py +++ b/tests/test_endpoints/test_output_format.py @@ -11,23 +11,24 @@ # query_parameters = endpoint_data['query_parameters'] # expected_fields = endpoint_data['expected_fields'] ENDPOINT_ROUTER_LIST = [ - '/api/v1/admin1', - '/api/v1/admin2', - '/api/v1/age_range', - '/api/v1/dataset', - '/api/v1/gender', - '/api/v1/location', - '/api/v1/themes/3W', - '/api/v1/org', - '/api/v1/org_type', - '/api/v1/themes/population', - '/api/v1/population_group', - '/api/v1/population_status', - '/api/v1/themes/food_security', - '/api/v1/themes/national_risk', - '/api/v1/themes/humanitarian_needs', - '/api/v1/resource', - '/api/v1/sector', + '/api/v1/metadata/admin1', + '/api/v1/metadata/admin2', + '/api/v1/metadata/dataset', + '/api/v1/affected-people/humanitarian-needs', + '/api/v1/metadata/location', + '/api/v1/metadata/org', + 
'/api/v1/metadata/org-type', + '/api/v1/metadata/resource', + '/api/v1/metadata/sector', + '/api/v1/population-social/population', + '/api/v1/population-social/poverty-rate', + '/api/v1/coordination-context/national-risk', + '/api/v1/coordination-context/operational-presence', + '/api/v1/affected-people/refugees', + '/api/v1/coordination-context/funding', + '/api/v1/coordination-context/conflict-event', + '/api/v1/food/food-security', + '/api/v1/metadata/currency', ] @@ -40,7 +41,7 @@ async def test_output_format(event_loop, refresh_db, endpoint_router): response = await ac.get(endpoint_router) assert response.status_code == 200 assert response.headers.get('content-type') == 'application/json', 'The output should be in json format' - no_rows_json = len(response.json()) + no_rows_json = len(response.json()['data']) assert no_rows_json > 0 # CSV diff --git a/tests/test_endpoints/test_population_endpoint.py b/tests/test_endpoints/test_population_endpoint.py index 0bddb69b..b1813f76 100644 --- a/tests/test_endpoints/test_population_endpoint.py +++ b/tests/test_endpoints/test_population_endpoint.py @@ -2,13 +2,15 @@ import logging from httpx import AsyncClient +from hapi_schema.utils.enums import Gender + from hdx_hapi.endpoints.models.population import PopulationResponse from main import app from tests.test_endpoints.endpoint_data import endpoint_data log = logging.getLogger(__name__) -ENDPOINT_ROUTER = '/api/themes/population' +ENDPOINT_ROUTER = '/api/v1/population-social/population' endpoint_data = endpoint_data[ENDPOINT_ROUTER] query_parameters = endpoint_data['query_parameters'] expected_fields = endpoint_data['expected_fields'] @@ -20,7 +22,7 @@ async def test_get_populations(event_loop, refresh_db): async with AsyncClient(app=app, base_url='http://test') as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one population entry in the database' + assert len(response.json()['data']) > 0, 'There should be at least one population entry in the database' @pytest.mark.asyncio @@ -31,16 +33,12 @@ async def test_get_population_params(event_loop, refresh_db): async with AsyncClient(app=app, base_url='http://test', params={param_name: param_value}) as ac: response = await ac.get(ENDPOINT_ROUTER) + log.info(f'{param_name}:{param_value} - {len(response.json()["data"]) } rows') assert response.status_code == 200 - assert len(response.json()) > 0, \ - f'There should be at least one population entry for parameter "{param_name}" with value "{param_value}" ' \ + assert len(response.json()['data']) > 0, ( + f'There should be at least one population entry for parameter "{param_name}" with value "{param_value}" ' 'in the database' - - async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: - response = await ac.get(ENDPOINT_ROUTER) - - assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one population entry for all parameters in the database' + ) @pytest.mark.asyncio @@ -51,80 +49,101 @@ async def test_get_population_result(event_loop, refresh_db): response = await ac.get(ENDPOINT_ROUTER) for field in expected_fields: - assert field in response.json()[0], f'Field "{field}" not found in the response' + assert field in response.json()['data'][0], f'Field "{field}" not found in the response' - assert len(response.json()[0]) == len(expected_fields), 'Response has a different number of fields than expected' + assert len(response.json()['data'][0]) 
== len( + expected_fields + ), 'Response has a different number of fields than expected' @pytest.mark.asyncio async def test_get_population_adm_fields(event_loop, refresh_db): log.info('started test_get_population_adm_fields') - population_view_adm_specified = PopulationResponse( - gender_code='f', - age_range_code='0-1', - population=1, - dataset_hdx_stub='test-dataset1', - resource_hdx_id='test-resource1', - location_code='Foolandia', - location_name='FOO-XXX', - admin1_is_unspecified=False, + resource_hdx_id='foo', + admin2_ref=1, + gender=Gender.MALE, + age_range='10-14', + min_age=10, + max_age=14, + population=100, + reference_period_start='2023-01-01 00:00:00', + reference_period_end='2023-03-31 23:59:59', + location_ref=1, + location_code='FOO', + location_name='Foolandia', + admin1_ref=1, admin1_code='FOO-XXX', admin1_name='Province 01', - admin2_is_unspecified=False, + admin1_is_unspecified=False, admin2_code='FOO-XXX-XXX', admin2_name='District A', - reference_period_start='2023-01-01 00:00:00', - reference_period_end='2023-03-31 23:59:59' + admin2_is_unspecified=False, ) - - assert population_view_adm_specified.admin1_code == 'FOO-XXX', \ - 'admin1_code should keep its value when admin1_is_unspecified is False' - assert population_view_adm_specified.admin1_name == 'Province 01', \ - 'admin1_name should keep its value when admin1_is_unspecified is False' - assert population_view_adm_specified.admin2_code == 'FOO-XXX-XXX', \ - 'admin2_code should keep its value when admin1_is_unspecified is False' - assert population_view_adm_specified.admin2_name == 'District A', \ - 'admin2_name should keep its value when admin1_is_unspecified is False' + assert ( + population_view_adm_specified.admin1_code == 'FOO-XXX' + ), 'admin1_code should keep its value when admin1_is_unspecified is False' + assert ( + population_view_adm_specified.admin1_name == 'Province 01' + ), 'admin1_name should keep its value when admin1_is_unspecified is False' + assert ( + population_view_adm_specified.admin2_code == 'FOO-XXX-XXX' + ), 'admin2_code should keep its value when admin1_is_unspecified is False' + assert ( + population_view_adm_specified.admin2_name == 'District A' + ), 'admin2_name should keep its value when admin1_is_unspecified is False' population_view_adm_unspecified = PopulationResponse( - gender_code='f', - age_range_code='0-1', - population=1, - dataset_hdx_stub='test-dataset1', - resource_hdx_id='test-resource1', - location_code='Foolandia', - location_name='FOO-XXX', - admin1_is_unspecified=True, + resource_hdx_id='foo', + admin2_ref=1, + gender=Gender.MALE, + age_range='10-14', + min_age=10, + max_age=14, + population=100, + reference_period_start='2023-01-01 00:00:00', + reference_period_end='2023-03-31 23:59:59', + location_ref=1, + location_code='FOO', + location_name='Foolandia', + admin1_ref=1, admin1_code='FOO-XXX', - admin1_name='Unpecified', - admin2_is_unspecified=True, - admin2_code='FOO-XXX', + admin1_name='Unspecified', + admin1_is_unspecified=True, + admin2_code='FOO-XXX-XXX', admin2_name='Unspecified', - reference_period_start='2023-01-01 00:00:00', - reference_period_end='2023-03-31 23:59:59' + admin2_is_unspecified=True, ) - assert population_view_adm_unspecified.admin1_code is None, \ - 'admin1_code should be changed to None when admin1_is_unspecified is True' - assert population_view_adm_unspecified.admin1_name is None, \ - 'admin1_name should be changed to None when admin1_is_unspecified is True' - assert population_view_adm_unspecified.admin2_code is None, \ - 
'admin2_code should be changed to None when admin1_is_unspecified is True' - assert population_view_adm_unspecified.admin2_name is None, \ - 'admin2_name should be changed to None when admin1_is_unspecified is True' + assert ( + population_view_adm_unspecified.admin1_code is None + ), 'admin1_code should be changed to None when admin1_is_unspecified is True' + assert ( + population_view_adm_unspecified.admin1_name is None + ), 'admin1_name should be changed to None when admin1_is_unspecified is True' + assert ( + population_view_adm_unspecified.admin2_code is None + ), 'admin2_code should be changed to None when admin2_is_unspecified is True' + assert ( + population_view_adm_unspecified.admin2_name is None + ), 'admin2_name should be changed to None when admin2_is_unspecified is True' @pytest.mark.asyncio async def test_get_population_admin_level(event_loop, refresh_db): log.info('started test_get_population_admin_level') - async with AsyncClient(app=app, base_url='http://test', ) as ac: + async with AsyncClient( + app=app, + base_url='http://test', + ) as ac: response = await ac.get(ENDPOINT_ROUTER) - assert len(response.json()[0]) == len(expected_fields), 'Response has a different number of fields than expected' + assert len(response.json()['data'][0]) == len( + expected_fields + ), 'Response has a different number of fields than expected' - response_items = response.json() + response_items = response.json()['data'] admin_0_count = len( [item for item in response_items if item['admin1_name'] is None and item['admin2_name'] is None] ) @@ -140,7 +159,10 @@ async def test_get_population_admin_level(event_loop, refresh_db): '2': admin_2_count, } + for item in response_items: + log.info(f"{item['admin1_name']}, {item['admin2_name']}") + log.info(counts_map) for admin_level, count in counts_map.items(): async with AsyncClient(app=app, base_url='http://test', params={'admin_level': admin_level}) as ac: response = await ac.get(ENDPOINT_ROUTER) - assert len(response.json()) == count, f'Admin level {admin_level} should return {count} entries' \ No newline at end of file + assert len(response.json()['data']) == count, f'Admin level {admin_level} should return {count} entries' diff --git a/tests/test_endpoints/test_gender_endpoint.py b/tests/test_endpoints/test_poverty_rate_endpoint.py similarity index 51% rename from tests/test_endpoints/test_gender_endpoint.py rename to tests/test_endpoints/test_poverty_rate_endpoint.py index dbf65f5d..68de483e 100644 --- a/tests/test_endpoints/test_gender_endpoint.py +++ b/tests/test_endpoints/test_poverty_rate_endpoint.py @@ -1,54 +1,60 @@ import pytest import logging + from httpx import AsyncClient from main import app from tests.test_endpoints.endpoint_data import endpoint_data log = logging.getLogger(__name__) -ENDPOINT_ROUTER = '/api/gender' +ENDPOINT_ROUTER = '/api/v1/population-social/poverty-rate' endpoint_data = endpoint_data[ENDPOINT_ROUTER] query_parameters = endpoint_data['query_parameters'] expected_fields = endpoint_data['expected_fields'] @pytest.mark.asyncio -async def test_get_genders(event_loop, refresh_db): - log.info('started test_get_genders') +async def test_get_poverty_rates(event_loop, refresh_db): + log.info('started test_get_poverty_rates') async with AsyncClient(app=app, base_url='http://test') as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one gender in the database' + assert len(response.json()['data']) > 0, 'There should be at least one
poverty rate entry in the database' @pytest.mark.asyncio -async def test_get_gender_params(event_loop, refresh_db): - log.info('started test_get_gender_params') +async def test_get_poverty_rate_params(event_loop, refresh_db): + log.info('started test_get_poverty_rate_params') for param_name, param_value in query_parameters.items(): async with AsyncClient(app=app, base_url='http://test', params={param_name: param_value}) as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, \ - 'There should be at least one gender entry for parameter ' \ - f'"{param_name}" with value "{param_value}" in the database' + assert len(response.json()['data']) > 0, ( + f'There should be at least one poverty rate entry for parameter "{param_name}" with value "{param_value}" ' + 'in the database' + ) async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one gender entry for all parameters in the database' + assert ( + len(response.json()['data']) > 0 + ), 'There should be at least one poverty rate entry for all parameters in the database' @pytest.mark.asyncio -async def test_get_gender_result(event_loop, refresh_db): - log.info('started test_get_gender_result') +async def test_get_poverty_rate_result(event_loop, refresh_db): + log.info('started test_get_poverty_rate_result') async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: response = await ac.get(ENDPOINT_ROUTER) for field in expected_fields: - assert field in response.json()[0], f'Field "{field}" not found in the response' + assert field in response.json()['data'][0], f'Field "{field}" not found in the response' - assert len(response.json()[0]) == len(expected_fields), 'Response has a different number of fields than expected' + assert len(response.json()['data'][0]) == len( + expected_fields + ), 'Response has a different number of fields than expected' diff --git a/tests/test_endpoints/test_population_group_endpoint.py b/tests/test_endpoints/test_refugees_endpoint.py similarity index 52% rename from tests/test_endpoints/test_population_group_endpoint.py rename to tests/test_endpoints/test_refugees_endpoint.py index 1f7fdc54..a8409ca8 100644 --- a/tests/test_endpoints/test_population_group_endpoint.py +++ b/tests/test_endpoints/test_refugees_endpoint.py @@ -7,49 +7,54 @@ log = logging.getLogger(__name__) -ENDPOINT_ROUTER = '/api/population_group' +ENDPOINT_ROUTER = '/api/v1/affected-people/refugees' endpoint_data = endpoint_data[ENDPOINT_ROUTER] query_parameters = endpoint_data['query_parameters'] expected_fields = endpoint_data['expected_fields'] @pytest.mark.asyncio -async def test_get_population_group(event_loop, refresh_db): - log.info('started test_get_population_group') +async def test_get_refugees(event_loop, refresh_db): + log.info('started test_get_refugees') async with AsyncClient(app=app, base_url='http://test') as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one population group in the database' + assert len(response.json()['data']) > 0, 'There should be at least one refugee entry in the database' @pytest.mark.asyncio -async def test_get_population_group_params(event_loop, refresh_db): - log.info('started test_get_population_group_params') +async def test_get_refugee_params(event_loop,
refresh_db): + log.info('started test_get_refugee_params') for param_name, param_value in query_parameters.items(): async with AsyncClient(app=app, base_url='http://test', params={param_name: param_value}) as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, \ - f'There should be at least one population group entry for parameter "{param_name}" ' \ - f'with value "{param_value}" in the database' + assert len(response.json()['data']) > 0, ( + f'There should be at least one refugee entry for parameter "{param_name}" with value "{param_value}" ' + 'in the database' + ) async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one population group for all parameters in the database' + assert ( + len(response.json()['data']) > 0 + ), 'There should be at least one refugee entry for all parameters in the database' @pytest.mark.asyncio -async def test_get_population_group_result(event_loop, refresh_db): - log.info('started test_get_population_group_result') +async def test_get_refugee_result(event_loop, refresh_db): + log.info('started test_get_refugee_result') async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: response = await ac.get(ENDPOINT_ROUTER) for field in expected_fields: - assert field in response.json()[0], f'Field "{field}" not found in the response' + assert field in response.json()['data'][0], f'Field "{field}" not found in the response' - assert len(response.json()[0]) == len(expected_fields), 'Response has a different number of fields than expected' + assert len(response.json()['data'][0]) == len( + expected_fields + ), 'Response has a different number of fields than expected' diff --git a/tests/test_endpoints/test_resource_endpoint.py b/tests/test_endpoints/test_resource_endpoint.py index d1db6fe5..2ac9e858 100644 --- a/tests/test_endpoints/test_resource_endpoint.py +++ b/tests/test_endpoints/test_resource_endpoint.py @@ -7,7 +7,7 @@ log = logging.getLogger(__name__) -ENDPOINT_ROUTER = '/api/resource' +ENDPOINT_ROUTER = '/api/v1/metadata/resource' endpoint_data = endpoint_data[ENDPOINT_ROUTER] query_parameters = endpoint_data['query_parameters'] expected_fields = endpoint_data['expected_fields'] @@ -19,7 +19,7 @@ async def test_get_resources(event_loop, refresh_db): async with AsyncClient(app=app, base_url='http://test') as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one resource in the database' + assert len(response.json()['data']) > 0, 'There should be at least one resource in the database' @pytest.mark.asyncio @@ -31,15 +31,18 @@ async def test_get_resource_params(event_loop, refresh_db): response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, \ - f'There should be at least one resource entry for parameter "{param_name}" with value "{param_value}" ' \ - 'in the database' + assert len(response.json()['data']) > 0, ( + f'There should be at least one resource entry for parameter "{param_name}" with value "{param_value}" ' + 'in the database' + ) async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least 
one resource entry for all parameters in the database' + assert ( + len(response.json()['data']) > 0 + ), 'There should be at least one resource entry for all parameters in the database' @pytest.mark.asyncio @@ -50,6 +53,11 @@ async def test_get_resource_result(event_loop, refresh_db): response = await ac.get(ENDPOINT_ROUTER) for field in expected_fields: - assert field in response.json()[0], f'Field "{field}" not found in the response' + assert field in response.json()['data'][0], f'Field "{field}" not found in the response' - assert len(response.json()[0]) == len(expected_fields), 'Response has a different number of fields than expected' + for field in response.json()['data'][0]: + assert field in expected_fields, f'Field "{field}" unexpected' + + assert len(response.json()['data'][0]) == len( + expected_fields + ), 'Response has a different number of fields than expected' diff --git a/tests/test_endpoints/test_sector_endpoint.py b/tests/test_endpoints/test_sector_endpoint.py index a501367f..11933ce9 100644 --- a/tests/test_endpoints/test_sector_endpoint.py +++ b/tests/test_endpoints/test_sector_endpoint.py @@ -7,7 +7,7 @@ log = logging.getLogger(__name__) -ENDPOINT_ROUTER = '/api/sector' +ENDPOINT_ROUTER = '/api/v1/metadata/sector' endpoint_data = endpoint_data[ENDPOINT_ROUTER] query_parameters = endpoint_data['query_parameters'] expected_fields = endpoint_data['expected_fields'] @@ -19,7 +19,7 @@ async def test_get_sectors(event_loop, refresh_db): async with AsyncClient(app=app, base_url='http://test') as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one sector in the database' + assert len(response.json()['data']) > 0, 'There should be at least one sector in the database' @pytest.mark.asyncio @@ -31,15 +31,18 @@ async def test_get_sector_params(event_loop, refresh_db): response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, \ - f'There should be at least one sector entry for parameter "{param_name}" with value "{param_value}" '\ + assert len(response.json()['data']) > 0, ( + f'There should be at least one sector entry for parameter "{param_name}" with value "{param_value}" ' 'in the database' + ) async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: response = await ac.get(ENDPOINT_ROUTER) assert response.status_code == 200 - assert len(response.json()) > 0, 'There should be at least one sector entry for all parameters in the database' + assert ( + len(response.json()['data']) > 0 + ), 'There should be at least one sector entry for all parameters in the database' @pytest.mark.asyncio @@ -50,6 +53,8 @@ async def test_get_sector_result(event_loop, refresh_db): response = await ac.get(ENDPOINT_ROUTER) for field in expected_fields: - assert field in response.json()[0], f'Field "{field}" not found in the response' + assert field in response.json()['data'][0], f'Field "{field}" not found in the response' - assert len(response.json()[0]) == len(expected_fields), 'Response has a different number of fields than expected' + assert len(response.json()['data'][0]) == len( + expected_fields + ), 'Response has a different number of fields than expected' diff --git a/tests/test_endpoints/test_version.py b/tests/test_endpoints/test_version.py new file mode 100644 index 00000000..5b249d6d --- /dev/null +++ b/tests/test_endpoints/test_version.py @@ -0,0 +1,25 @@ +import pytest +import logging + +from httpx import 
AsyncClient +from main import app +from hdx_hapi.endpoints.util import version as hapi_version + +log = logging.getLogger(__name__) + +ENDPOINT_ROUTER = '/api/v1/util/version' + + +@pytest.mark.asyncio +async def test_version(): + log.info('started test_version') + + async with AsyncClient(app=app, base_url='http://test') as ac: + response = await ac.get(ENDPOINT_ROUTER) + + assert response.status_code == 200 + assert len(response.json()) == 2, 'Response has a different number of fields than expected' + assert response.json() == { + 'api_version': hapi_version.api_version, + 'hapi_sqlalchemy_schema_version': hapi_version.hapi_sqlalchemy_schema_version, + } diff --git a/tests/test_endpoints/test_wfp_commodity_endpoint.py b/tests/test_endpoints/test_wfp_commodity_endpoint.py new file mode 100644 index 00000000..0c5b162a --- /dev/null +++ b/tests/test_endpoints/test_wfp_commodity_endpoint.py @@ -0,0 +1,63 @@ +import pytest +import logging + +from httpx import AsyncClient +from main import app +from tests.test_endpoints.endpoint_data import endpoint_data + +log = logging.getLogger(__name__) + +ENDPOINT_ROUTER = '/api/v1/metadata/wfp-commodity' +endpoint_data = endpoint_data[ENDPOINT_ROUTER] +query_parameters = endpoint_data['query_parameters'] +expected_fields = endpoint_data['expected_fields'] + + +@pytest.mark.asyncio +async def test_get_wfp_commodities(event_loop, refresh_db): + log.info('started test_get_wfp_commodities') + async with AsyncClient(app=app, base_url='http://test') as ac: + response = await ac.get(ENDPOINT_ROUTER) + assert response.status_code == 200 + assert len(response.json()['data']) > 0, 'There should be at least one wfp_commodity in the database' + + +@pytest.mark.asyncio +async def test_get_wfp_commodity_params(event_loop, refresh_db): + log.info('started test_get_wfp_commodity_params') + + for param_name, param_value in query_parameters.items(): + async with AsyncClient(app=app, base_url='http://test', params={param_name: param_value}) as ac: + response = await ac.get(ENDPOINT_ROUTER) + + assert response.status_code == 200 + assert len(response.json()['data']) > 0, ( + f'There should be at least one wfp_commodity entry for parameter "{param_name}" with value "{param_value}" ' + 'in the database' + ) + + async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: + response = await ac.get(ENDPOINT_ROUTER) + + assert response.status_code == 200 + assert ( + len(response.json()['data']) > 0 + ), 'There should be at least one wfp_commodity entry for all parameters in the database' + + +@pytest.mark.asyncio +async def test_get_wfp_commodity_result(event_loop, refresh_db): + log.info('started test_get_wfp_commodity_result') + + async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: + response = await ac.get(ENDPOINT_ROUTER) + + for field in expected_fields: + assert field in response.json()['data'][0], f'Field "{field}" not found in the response' + + for field in response.json()['data'][0]: + assert field in expected_fields, f'Field "{field}" unexpected' + + assert len(response.json()['data'][0]) == len( + expected_fields + ), 'Response has a different number of fields than expected' diff --git a/tests/test_endpoints/test_wfp_market_endpoint.py b/tests/test_endpoints/test_wfp_market_endpoint.py new file mode 100644 index 00000000..1a18632b --- /dev/null +++ b/tests/test_endpoints/test_wfp_market_endpoint.py @@ -0,0 +1,63 @@ +import pytest +import logging + +from httpx import AsyncClient +from main import app +from 
tests.test_endpoints.endpoint_data import endpoint_data + +log = logging.getLogger(__name__) + +ENDPOINT_ROUTER = '/api/v1/metadata/wfp-market' +endpoint_data = endpoint_data[ENDPOINT_ROUTER] +query_parameters = endpoint_data['query_parameters'] +expected_fields = endpoint_data['expected_fields'] + + +@pytest.mark.asyncio +async def test_get_wfp_markets(event_loop, refresh_db): + log.info('started test_get_wfp_markets') + async with AsyncClient(app=app, base_url='http://test') as ac: + response = await ac.get(ENDPOINT_ROUTER) + assert response.status_code == 200 + assert len(response.json()['data']) > 0, 'There should be at least one wfp_market in the database' + + +@pytest.mark.asyncio +async def test_get_wfp_market_params(event_loop, refresh_db): + log.info('started test_get_wfp_market_params') + + for param_name, param_value in query_parameters.items(): + async with AsyncClient(app=app, base_url='http://test', params={param_name: param_value}) as ac: + response = await ac.get(ENDPOINT_ROUTER) + + assert response.status_code == 200 + assert len(response.json()['data']) > 0, ( + f'There should be at least one wfp_market entry for parameter "{param_name}" with value "{param_value}" ' + 'in the database' + ) + + async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: + response = await ac.get(ENDPOINT_ROUTER) + + assert response.status_code == 200 + assert ( + len(response.json()['data']) > 0 + ), 'There should be at least one wfp_market entry for all parameters in the database' + + +@pytest.mark.asyncio +async def test_get_wfp_market_result(event_loop, refresh_db): + log.info('started test_get_wfp_market_result') + + async with AsyncClient(app=app, base_url='http://test', params=query_parameters) as ac: + response = await ac.get(ENDPOINT_ROUTER) + + for field in expected_fields: + assert field in response.json()['data'][0], f'Field "{field}" not found in the response' + + for field in response.json()['data'][0]: + assert field in expected_fields, f'Field "{field}" unexpected' + + assert len(response.json()['data'][0]) == len( + expected_fields + ), 'Response has a different number of fields than expected' diff --git a/tests/test_helpers/test_url_helpers.py b/tests/test_helpers/test_url_helpers.py index 738c8dfa..8ce1aa69 100644 --- a/tests/test_helpers/test_url_helpers.py +++ b/tests/test_helpers/test_url_helpers.py @@ -5,11 +5,11 @@ from hdx_hapi.services.hdx_url_logic import get_dataset_url, get_dataset_api_url - log = logging.getLogger(__name__) config = get_config() + def test_helper_get_dataset_url(): log.info('started test_helper_get_dataset_url') @@ -21,15 +21,16 @@ def test_helper_get_dataset_url(): assert dataset_url == expected_link dataset_view = DatasetResponse( - hdx_id=dataset_id, - hdx_stub=dataset_id, - title='Test Dataset #1', + dataset_hdx_id=dataset_id, + dataset_hdx_stub=dataset_id, + dataset_hdx_title='Test Dataset #1', hdx_provider_stub='test-provider', - hdx_provider_name='Test Provider' + hdx_provider_name='Test Provider', ) assert dataset_view.hdx_link == expected_link + def test_helper_get_dataset_api_url(): log.info('started test_helper_get_dataset_api_url') @@ -41,15 +42,16 @@ def test_helper_get_dataset_api_url(): assert dataset_api_url == expected_link dataset_view = DatasetResponse( - hdx_id=dataset_id, - hdx_stub=dataset_id, - title='Test Dataset #2', + dataset_hdx_id=dataset_id, + dataset_hdx_stub=dataset_id, + dataset_hdx_title='Test Dataset #2', hdx_provider_stub='test-provider2', - hdx_provider_name='Test Provider 2' +
hdx_provider_name='Test Provider 2', ) assert dataset_view.hdx_api_link == expected_link + # def test_helper_get_organization_url(): # log.info('started test_helper_get_organization_url')
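The migrated endpoint tests above all repeat the same checks against the new response envelope: a 200 status, at least one row under `data`, and an exact match between the row's fields and `expected_fields`. A minimal sketch of that shared check as one reusable helper (illustrative only, not part of the patch: the name `assert_endpoint_shape` is hypothetical, while `app` and `endpoint_data` are the objects the tests already import):

```python
# Sketch only: a shared helper the copy-pasted assertions above could call.
from httpx import AsyncClient

from main import app
from tests.test_endpoints.endpoint_data import endpoint_data


async def assert_endpoint_shape(endpoint_router: str) -> None:
    """Check the invariants every migrated endpoint test repeats."""
    expected_fields = endpoint_data[endpoint_router]['expected_fields']
    async with AsyncClient(app=app, base_url='http://test') as ac:
        response = await ac.get(endpoint_router)
    assert response.status_code == 200
    data = response.json()['data']  # rows are now wrapped in a 'data' envelope
    assert len(data) > 0, f'There should be at least one row for {endpoint_router} in the database'
    for field in expected_fields:
        assert field in data[0], f'Field "{field}" not found in the response'
    for field in data[0]:
        assert field in expected_fields, f'Field "{field}" unexpected'
    assert len(data[0]) == len(expected_fields), 'Response has a different number of fields than expected'
```

Each test body above could then reduce to `await assert_endpoint_shape(ENDPOINT_ROUTER)` plus its endpoint-specific parameter checks.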
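The `PopulationResponse` assertions in `test_get_population_adm_fields` rely on the model blanking admin codes and names whenever the matching `*_is_unspecified` flag is set, which is also what lets `test_get_population_admin_level` count rows by null `admin1_name`/`admin2_name`. A rough sketch of that contract, assuming a pydantic v2 model; the validator below is an illustration of the behaviour the tests exercise, not the actual implementation in `hdx_hapi.endpoints.models.population`:

```python
# Sketch only: approximates the nulling behaviour the tests above exercise;
# the real PopulationResponse validator may be implemented differently.
from typing import Optional

from pydantic import BaseModel, model_validator


class AdminScopedResponse(BaseModel):
    admin1_code: Optional[str] = None
    admin1_name: Optional[str] = None
    admin1_is_unspecified: bool = False
    admin2_code: Optional[str] = None
    admin2_name: Optional[str] = None
    admin2_is_unspecified: bool = False

    @model_validator(mode='after')
    def _null_unspecified_admin_units(self) -> 'AdminScopedResponse':
        # Placeholder "Unspecified" admin units are exposed as None so that
        # clients (and the admin_level counting test) can treat them as absent.
        if self.admin1_is_unspecified:
            self.admin1_code = None
            self.admin1_name = None
        if self.admin2_is_unspecified:
            self.admin2_code = None
            self.admin2_name = None
        return self
```

Under this contract, constructing the model with `admin1_is_unspecified=True` yields `admin1_code is None` and `admin1_name is None`, which is exactly what the assertions above check.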