Skip to content

Commit

Permalink
fix: pagination updates for the alerts endpoint (#106)
Browse files Browse the repository at this point in the history
* Updated alerts.get to get paginated results

* Proposed changes for the list alerts function.

* Removed default limit and added paging data

---------

Co-authored-by: Timothy MacDonald <[email protected]>
  • Loading branch information
alannix-lw and tmac1973 committed Feb 1, 2023
1 parent ab95e9e commit 5da3f25
Showing 1 changed file with 32 additions and 2 deletions.
34 changes: 32 additions & 2 deletions laceworksdk/api/v2/alerts.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,16 +22,18 @@ def __init__(self, session):
def get(self,
start_time=None,
end_time=None,
limit=None,
**request_params):
"""
A method to get Alerts objects.
:param start_time: A "%Y-%m-%dT%H:%M:%SZ" structured timestamp to begin from.
:param end_time: A "%Y-%m-%dT%H:%M:%S%Z" structured timestamp to end at.
:param limit: An integer representing the maximum number of Alerts to return.
:param request_params: Additional request parameters.
(provides support for parameters that may be added in the future)
:return response json
:return: response json
"""

params = self.build_dict_from_items(
Expand All @@ -42,7 +44,35 @@ def get(self,

response = self._session.get(self.build_url(), params=params)

return response.json()
return_data = {"data": []}
current_rows = 0

while True:
response_json = response.json()

return_data["paging"] = response_json["paging"]

if limit:
take = limit - current_rows
return_data["data"].extend(response_json["data"][:take])
else:
return_data["data"].extend(response_json["data"])
current_rows = len(return_data["data"])

if limit and current_rows >= limit:
break

try:
next_page = response_json.get("paging", {}).get("urls", {}).get("nextPage")
except Exception:
next_page = None

if next_page:
response = self._session.get(next_page, params=params)
else:
break

return return_data

def get_details(self,
id,
Expand Down

0 comments on commit 5da3f25

Please sign in to comment.