Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

HDX-9819 Adapt the dataset and resource endpoints to the new schema #93

4 changes: 4 additions & 0 deletions hdx_hapi/config/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ class Config:
HDX_DATASET_URL: str
HDX_DATASET_API_URL: str
HDX_ORGANIZATION_URL: str
HDX_ORGANIZATION_API_URL: str

HDX_RESOURCE_URL: str
HDX_RESOURCE_API_URL: str
Expand Down Expand Up @@ -54,6 +55,9 @@ def get_config() -> Config:
'HDX_RESOURCE_API_URL', '{domain}/api/action/resource_show?id={resource_id}'
),
HDX_ORGANIZATION_URL=os.getenv('HDX_ORGANIZATION_URL', '{domain}/organization/{org_id}'),
HDX_ORGANIZATION_API_URL=os.getenv(
'HDX_ORGANIZATION_API_URL', '{domain}/api/action/organization_show?id={org_id}'
),
HAPI_READTHEDOCS_OVERVIEW_URL=os.getenv(
'HAPI_READTHEDOCS_OVERVIEW_URL', 'https://hdx-hapi.readthedocs.io/en/latest/'
),
Expand Down
2 changes: 1 addition & 1 deletion hdx_hapi/db/dao/dataset_view_dao.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select

from hdx_hapi.db.models.views.db_dataset_view import DatasetView
from hdx_hapi.db.models.views.all_views import DatasetView
from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter
from hdx_hapi.endpoints.util.util import PaginationParams

Expand Down
9 changes: 1 addition & 8 deletions hdx_hapi/db/dao/resource_view_dao.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,11 @@
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select

from hdx_hapi.db.models.views.db_resource_view import ResourceView
from hdx_hapi.db.models.views.all_views import ResourceView
from hdx_hapi.db.dao.util.util import apply_pagination, case_insensitive_filter
from hdx_hapi.endpoints.util.util import PaginationParams



async def resources_view_list(
pagination_parameters: PaginationParams,
db: AsyncSession,
Expand All @@ -18,8 +17,6 @@ async def resources_view_list(
is_hxl: bool = None,
hapi_updated_date_min: datetime = None,
hapi_updated_date_max: datetime = None,
hapi_replaced_date_min: datetime = None,
hapi_replaced_date_max: datetime = None,
dataset_title: str = None,
dataset_hdx_id: str = None,
dataset_hdx_stub: str = None,
Expand All @@ -41,10 +38,6 @@ async def resources_view_list(
query = query.where(ResourceView.hapi_updated_date >= hapi_updated_date_min)
if hapi_updated_date_max:
query = query.where(ResourceView.hapi_updated_date < hapi_updated_date_max)
if hapi_replaced_date_min:
query = query.where(ResourceView.hapi_replaced_date >= hapi_replaced_date_min)
if hapi_replaced_date_max:
query = query.where(ResourceView.hapi_replaced_date < hapi_replaced_date_max)
if dataset_title:
query = query.where(ResourceView.dataset_title == dataset_title)
if dataset_hdx_id:
Expand Down
32 changes: 4 additions & 28 deletions hdx_hapi/endpoints/get_hdx_metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,10 +18,6 @@
DOC_SEE_DATASET,
DOC_UPDATE_DATE_MAX,
DOC_UPDATE_DATE_MIN,
DOC_HAPI_UPDATED_DATE_MIN,
DOC_HAPI_UPDATED_DATE_MAX,
DOC_HAPI_REPLACED_DATE_MIN,
DOC_HAPI_REPLACED_DATE_MAX,
)

from hdx_hapi.endpoints.models.base import HapiGenericResponse
Expand All @@ -42,13 +38,13 @@


@router.get(
'/api/dataset',
'/api/metadata/dataset',
response_model=HapiGenericResponse[DatasetResponse],
summary='Get information about the sources of the data in HAPI',
include_in_schema=False,
)
@router.get(
'/api/v1/dataset',
'/api/v1/metadata/dataset',
response_model=HapiGenericResponse[DatasetResponse],
summary='Get information about the sources of the data in HAPI',
)
Expand Down Expand Up @@ -79,13 +75,13 @@ async def get_datasets(


@router.get(
'/api/resource',
'/api/metadata/resource',
response_model=HapiGenericResponse[ResourceResponse],
summary='Get information about the sources of the data in HAPI',
include_in_schema=False,
)
@router.get(
'/api/v1/resource',
'/api/v1/metadata/resource',
response_model=HapiGenericResponse[ResourceResponse],
summary='Get information about the sources of the data in HAPI',
)
Expand All @@ -103,22 +99,6 @@ async def get_resources(
Query(description=f'{DOC_UPDATE_DATE_MAX}', openapi_examples={'2024-12-31': {'value': '2024-12-31'}}),
] = None,
is_hxl: Annotated[bool, Query(description=f'{DOC_HDX_RESOURCE_HXL}')] = None,
hapi_updated_date_min: Annotated[
NaiveDatetime | date,
Query(description=f'{DOC_HAPI_UPDATED_DATE_MIN}'),
] = None,
hapi_updated_date_max: Annotated[
NaiveDatetime | date,
Query(description=f'{DOC_HAPI_UPDATED_DATE_MAX}'),
] = None,
hapi_replaced_date_min: Annotated[
NaiveDatetime | date,
Query(description=f'{DOC_HAPI_REPLACED_DATE_MIN}'),
] = None,
hapi_replaced_date_max: Annotated[
NaiveDatetime | date,
Query(description=f'{DOC_HAPI_REPLACED_DATE_MAX}'),
] = None,
dataset_hdx_id: Annotated[str, Query(max_length=36, description=f'{DOC_HDX_DATASET_ID} {DOC_SEE_DATASET} ')] = None,
dataset_hdx_stub: Annotated[
str, Query(max_length=128, description=f'{DOC_HDX_DATASET_NAME} {DOC_SEE_DATASET}')
Expand All @@ -142,10 +122,6 @@ async def get_resources(
update_date_min=update_date_min,
update_date_max=update_date_max,
is_hxl=is_hxl,
hapi_updated_date_min=hapi_updated_date_min,
hapi_updated_date_max=hapi_updated_date_max,
hapi_replaced_date_min=hapi_replaced_date_min,
hapi_replaced_date_max=hapi_replaced_date_max,
dataset_hdx_id=dataset_hdx_id,
dataset_hdx_stub=dataset_hdx_stub,
dataset_title=dataset_title,
Expand Down
35 changes: 29 additions & 6 deletions hdx_hapi/endpoints/models/hdx_metadata.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from typing import List, Optional
from typing import List
from datetime import datetime
from pydantic import ConfigDict, Field, HttpUrl, computed_field
from hdx_hapi.endpoints.models.base import HapiBaseModel
Expand All @@ -7,6 +7,8 @@
get_resource_api_url,
get_dataset_url,
get_dataset_api_url,
get_organization_url,
get_organization_api_url,
)


Expand All @@ -29,26 +31,37 @@ def hdx_link(self) -> HttpUrl:
def hdx_api_link(self) -> HttpUrl:
return get_dataset_api_url(dataset_id=self.hdx_id)

@computed_field
@property
def provider_hdx_link(self) -> HttpUrl:
return get_organization_url(org_id=self.hdx_provider_stub)

model_config = ConfigDict(from_attributes=True)

@computed_field
@property
def provider_hdx_api_link(self) -> HttpUrl:
return get_organization_api_url(org_id=self.hdx_provider_stub)

model_config = ConfigDict(from_attributes=True)

def list_of_fields(self) -> List[str]:
fields = super().list_of_fields()
fields.extend(['hdx_link', 'api_link'])
fields.extend(['hdx_link', 'api_link', 'provider_hdx_link', 'provider_hdx_api_link'])
return fields


class ResourceResponse(HapiBaseModel):
# id: int
hdx_id: str = Field(max_length=36)
dataset_hdx_id: str = Field(max_length=36)
name: str = Field(max_length=256)
format: str = Field(max_length=32)
update_date: datetime
is_hxl: bool
hapi_updated_date: datetime
hapi_replaced_date: Optional[datetime]
download_url: HttpUrl
hapi_updated_date: datetime

dataset_hdx_id: str = Field(max_length=36)
dataset_hdx_stub: str = Field(max_length=128)

dataset_title: str = Field(max_length=1024)
Expand Down Expand Up @@ -77,9 +90,19 @@ def dataset_hdx_link(self) -> HttpUrl:
def dataset_hdx_api_link(self) -> HttpUrl:
return get_dataset_api_url(dataset_id=self.dataset_hdx_id)

@computed_field
@property
def provider_hdx_link(self) -> HttpUrl:
return get_organization_url(org_id=self.dataset_hdx_provider_stub)

@computed_field
@property
def provider_hdx_api_link(self) -> HttpUrl:
return get_organization_api_url(org_id=self.dataset_hdx_provider_stub)

model_config = ConfigDict(from_attributes=True)

def list_of_fields(self) -> List[str]:
fields = super().list_of_fields()
fields.extend(['hdx_link', 'api_link', 'dataset_hdx_link', 'dataset_api_link'])
fields.extend(['hdx_link', 'api_link', 'dataset_hdx_link', 'dataset_hdx_api_link'])
return fields
61 changes: 41 additions & 20 deletions hdx_hapi/services/hdx_url_logic.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,90 +7,111 @@

CONFIG = get_config()


def get_dataset_url(dataset_id: str) -> HttpUrl:
    """Creates the full HDX URL for a dataset page.

    Args:
        dataset_id (str): Dataset id or name
    Returns:
        HttpUrl: HDX URL for the specified dataset
    """
    domain = CONFIG.HDX_DOMAIN
    dataset_url = CONFIG.HDX_DATASET_URL
    # A missing domain still produces a (broken) URL; warn so operators notice.
    if not domain:
        logger.warning('HDX_DOMAIN environment variable is not set.')

    url = dataset_url.format(domain=domain, dataset_id=dataset_id)
    return HttpUrl(url=url)


def get_dataset_api_url(dataset_id: str) -> HttpUrl:
    """Creates the full HDX API URL for a dataset (CKAN ``package_show``).

    Args:
        dataset_id (str): Dataset id or name
    Returns:
        HttpUrl: HDX API URL for the specified dataset
    """
    domain = CONFIG.HDX_DOMAIN
    dataset_api_url = CONFIG.HDX_DATASET_API_URL
    # A missing domain still produces a (broken) URL; warn so operators notice.
    if not domain:
        logger.warning('HDX_DOMAIN environment variable is not set.')

    url = dataset_api_url.format(domain=domain, dataset_id=dataset_id)
    return HttpUrl(url=url)


def get_resource_url(dataset_id: str, resource_id: str) -> HttpUrl:
    """Creates the full HDX URL for a resource page.

    Args:
        dataset_id (str): Dataset id or name the resource belongs to
        resource_id (str): Resource id
    Returns:
        HttpUrl: HDX URL for the specified resource
    """
    domain = CONFIG.HDX_DOMAIN
    resource_url = CONFIG.HDX_RESOURCE_URL
    # A missing domain still produces a (broken) URL; warn so operators notice.
    if not domain:
        logger.warning('HDX_DOMAIN environment variable is not set.')

    url = resource_url.format(domain=domain, dataset_id=dataset_id, resource_id=resource_id)
    return HttpUrl(url=url)


def get_resource_api_url(resource_id: str) -> HttpUrl:
    """Creates the full HDX API URL for a resource (CKAN ``resource_show``).

    Args:
        resource_id (str): Resource id
    Returns:
        HttpUrl: HDX API URL for the specified resource
    """
    domain = CONFIG.HDX_DOMAIN
    resource_api_url = CONFIG.HDX_RESOURCE_API_URL
    # A missing domain still produces a (broken) URL; warn so operators notice.
    if not domain:
        logger.warning('HDX_DOMAIN environment variable is not set.')

    url = resource_api_url.format(domain=domain, resource_id=resource_id)
    return HttpUrl(url=url)


def get_organization_url(org_id: str) -> HttpUrl:
    """Creates the full HDX URL for an organization page.

    Args:
        org_id (str): Organization id or name

    Returns:
        HttpUrl: HDX URL for the specified organization
    """
    domain = CONFIG.HDX_DOMAIN
    organization_url = CONFIG.HDX_ORGANIZATION_URL
    # A missing domain still produces a (broken) URL; warn so operators notice.
    if not domain:
        logger.warning('HDX_DOMAIN environment variable is not set.')

    url = organization_url.format(domain=domain, org_id=org_id)
    return HttpUrl(url=url)


def get_organization_api_url(org_id: str) -> HttpUrl:
    """Creates the full HDX API URL for an organization (CKAN ``organization_show``).

    Args:
        org_id (str): Organization id or name
    Returns:
        HttpUrl: HDX API URL for the specified organization
    """
    domain = CONFIG.HDX_DOMAIN
    organization_api_url = CONFIG.HDX_ORGANIZATION_API_URL
    # A missing domain still produces a (broken) URL; warn so operators notice.
    if not domain:
        logger.warning('HDX_DOMAIN environment variable is not set.')

    url = organization_api_url.format(domain=domain, org_id=org_id)
    return HttpUrl(url=url)
4 changes: 0 additions & 4 deletions hdx_hapi/services/resource_logic.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,6 @@ async def get_resources_srv(
is_hxl: bool = None,
hapi_updated_date_min: datetime = None,
hapi_updated_date_max: datetime = None,
hapi_replaced_date_min: datetime = None,
hapi_replaced_date_max: datetime = None,
dataset_title: str = None,
dataset_hdx_id: str = None,
dataset_hdx_stub: str = None,
Expand All @@ -33,8 +31,6 @@ async def get_resources_srv(
is_hxl=is_hxl,
hapi_updated_date_min=hapi_updated_date_min,
hapi_updated_date_max=hapi_updated_date_max,
hapi_replaced_date_min=hapi_replaced_date_min,
hapi_replaced_date_max=hapi_replaced_date_max,
dataset_hdx_id=dataset_hdx_id,
dataset_hdx_stub=dataset_hdx_stub,
dataset_title=dataset_title,
Expand Down
5 changes: 2 additions & 3 deletions main.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,7 @@
# from hdx_hapi.endpoints.get_population import router as population_router # noqa
# from hdx_hapi.endpoints.get_operational_presence import router as operational_presence_router # noqa
from hdx_hapi.endpoints.get_admin_level import router as admin_level_router # noqa

# from hdx_hapi.endpoints.get_hdx_metadata import router as dataset_router # noqa
from hdx_hapi.endpoints.get_hdx_metadata import router as dataset_router # noqa
from hdx_hapi.endpoints.get_humanitarian_response import router as humanitarian_response_router # noqa
# from hdx_hapi.endpoints.get_demographic import router as demographic_router # noqa
# from hdx_hapi.endpoints.get_food_security import router as food_security_router # noqa
Expand Down Expand Up @@ -56,7 +55,7 @@
app.include_router(humanitarian_response_router)
# app.include_router(demographic_router)
# app.include_router(population_profile_router)
# app.include_router(dataset_router)
app.include_router(dataset_router)


# add middleware
Expand Down
2 changes: 1 addition & 1 deletion tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,9 +29,9 @@
from hdx_hapi.db.models.base import Base
from hdx_hapi.db.models.views.util.util import CreateView


SAMPLE_DATA_SQL_FILES = [
'tests/sample_location_admin.sql',
'tests/sample_dataset_resource.sql'
'tests/sample_sector.sql',
'tests/sample_org_type.sql',
'tests/sample_org.sql',
Expand Down
Loading
Loading