Skip to content

Commit

Permalink
Merge branch 'master' into fix-docstrings
Browse files Browse the repository at this point in the history
  • Loading branch information
nimaxin authored Nov 4, 2024
2 parents e157b3a + 9cd2145 commit 90204e2
Show file tree
Hide file tree
Showing 17 changed files with 258 additions and 111 deletions.
4 changes: 2 additions & 2 deletions requirements-base.txt
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,7 @@ frozenlist==1.4.1
# aiosignal
google-api-core==2.15.0
# via google-api-python-client
google-api-python-client==2.149.0
google-api-python-client==2.151.0
# via -r requirements-base.in
google-auth==2.26.1
# via
Expand Down Expand Up @@ -330,7 +330,7 @@ python-dateutil==2.9.0.post0
# pandas
python-jose==3.3.0
# via -r requirements-base.in
python-multipart==0.0.16
python-multipart==0.0.17
# via -r requirements-base.in
python-slugify==8.0.4
# via -r requirements-base.in
Expand Down
4 changes: 2 additions & 2 deletions requirements-dev.txt
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ executing==2.0.1
# stack-data
factory-boy==3.3.1
# via -r requirements-dev.in
faker==30.8.1
faker==30.8.2
# via
# -r requirements-dev.in
# factory-boy
Expand Down Expand Up @@ -86,7 +86,7 @@ python-dateutil==2.9.0.post0
# via faker
pyyaml==6.0.1
# via pre-commit
ruff==0.7.1
ruff==0.7.2
# via -r requirements-dev.in
six==1.16.0
# via
Expand Down
4 changes: 2 additions & 2 deletions src/dispatch/case/flows.py
Original file line number Diff line number Diff line change
Expand Up @@ -173,8 +173,8 @@ def case_remove_participant_flow(
def update_conversation(case: Case, db_session: Session) -> None:
"""Updates external communication conversation."""

# if case has dedicated channel, there's no thread to update
if case.conversation.thread_id is None:
# if no case conversation or case has dedicated channel, there's no thread to update
if case.conversation is None or case.conversation.thread_id is None:
return

plugin = plugin_service.get_active_instance(
Expand Down
16 changes: 9 additions & 7 deletions src/dispatch/event/flows.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,16 +56,18 @@ def delete_incident_event(
)


@background_task
def export_timeline(
timeline_filters: dict,
incident_id: int,
db_session=None,
organization_slug: str = None,
):
status = event_service.export_timeline(
db_session=db_session,
timeline_filters=timeline_filters,
incident_id=incident_id,
)
return status
try:
event_service.export_timeline(
db_session=db_session,
timeline_filters=timeline_filters,
incident_id=incident_id,
)

except Exception:
raise
44 changes: 31 additions & 13 deletions src/dispatch/event/service.py
Original file line number Diff line number Diff line change
Expand Up @@ -247,6 +247,7 @@ def export_timeline(
)
else:
table_data.append({time_header: time, "Description": e.description})

else:
dates.add(date)
if timeline_filters.get("exportOwner"):
Expand All @@ -272,7 +273,7 @@ def export_timeline(
resource_type="dispatch-incident-document",
)
if documents:
documents_list.append(documents.resource_id)
documents_list.append((documents.resource_id, "Incident"))

if timeline_filters.get("reviewDocument"):
documents = document_service.get_by_incident_id_and_resource_type(
Expand All @@ -282,12 +283,12 @@ def export_timeline(
resource_type="dispatch-incident-review-document",
)
if documents:
documents_list.append(documents.resource_id)
documents_list.append((documents.resource_id, "Incident Review"))

for doc_id in documents_list:
for doc_id, doc_name in documents_list:
# Checks for existing table in the document
table_exists, curr_table_start, curr_table_end, _ = plugin.instance.get_table_details(
document_id=doc_id, header="Timeline"
document_id=doc_id, header="Timeline", doc_name=doc_name
)

# Deletes existing table
Expand All @@ -307,7 +308,9 @@ def export_timeline(
log.debug("Existing table in the doc has been deleted")

else:
log.debug("Table doesn't exist under header, creating new table")
curr_table_start += 1

# Insert new table with required rows & columns
insert_table_request = [
{
Expand All @@ -322,7 +325,12 @@ def export_timeline(
log.debug("Table skeleton inserted successfully")

else:
return False
log.error(
f"Unable to insert table skeleton in the {doc_name} document with id {doc_id}"
)
raise Exception(
f"Unable to insert table skeleton for timeline export in the {doc_name} document"
)

# Formatting & inserting empty table
insert_data_request = [
Expand Down Expand Up @@ -381,19 +389,26 @@ def export_timeline(
log.debug("Table Formatted successfully")

else:
return False
log.error(
f"Unable to format table for timeline export in {doc_name} document with id {doc_id}"
)
raise Exception(
f"Unable to format table for timeline export in the {doc_name} document"
)

# Calculating table cell indices
_, _, _, cell_indices = plugin.instance.get_table_details(
document_id=doc_id, header="Timeline"
document_id=doc_id, header="Timeline", doc_name=doc_name
)

data_to_insert = list(column_headers) + [
item for row in table_data for item in row.values()
]
str_len = 0
row_idx = 0
insert_data_request = []
print("cell indices")
print(len(cell_indices))
print(len(data_to_insert))
for index, text in zip(cell_indices, data_to_insert, strict=True):
# Adjusting index based on string length
new_idx = index + str_len
Expand All @@ -420,7 +435,7 @@ def export_timeline(
}
)

# Formatting for date rows
        # Formatting for date rows
if text == "\t":
insert_data_request.append(
{
Expand All @@ -446,7 +461,7 @@ def export_timeline(
}
)

# Formatting for time column
            # Formatting for time column
if row_idx % num_columns == 0:
insert_data_request.append(
{
Expand All @@ -465,8 +480,11 @@ def export_timeline(

data_inserted = plugin.instance.insert(document_id=doc_id, request=insert_data_request)
if not data_inserted:
return False
raise Exception(f"Encountered error while inserting data into the {doc_name} document")

else:
log.error("No timeline data to export")
return False
log.error("No data to export")
raise Exception("No data to export, please check filter selection")
# raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=[{"msg": "No timeline data to export"}]) from None

return True
2 changes: 1 addition & 1 deletion src/dispatch/incident/flows.py
Original file line number Diff line number Diff line change
Expand Up @@ -157,7 +157,7 @@ def incident_create_resources(
) -> Incident:
"""Creates all resources required for incidents."""
# we create the incident ticket
if not incident.ticket:
if not incident.ticket or incident.ticket.resource_type == "jira-error-ticket":
ticket_flows.create_incident_ticket(incident=incident, db_session=db_session)

# we update the channel name immediately for dedicated channel cases escalated -> incident
Expand Down
20 changes: 13 additions & 7 deletions src/dispatch/incident/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -391,13 +391,19 @@ def export_timeline_event(
current_user: CurrentUser,
background_tasks: BackgroundTasks,
):
result = background_tasks.add_task(
event_flows.export_timeline,
timeline_filters=timeline_filters,
incident_id=incident_id,
organization_slug=organization,
)
return result
try:
event_flows.export_timeline(
timeline_filters=timeline_filters,
incident_id=incident_id,
organization_slug=organization,
db_session=db_session,
)
except Exception as e:
log.exception(e)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=[{"msg": (f"{str(e)}.",)}],
) from e


@router.delete(
Expand Down
3 changes: 3 additions & 0 deletions src/dispatch/incident_cost/models.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
from datetime import datetime

from sqlalchemy import Column, ForeignKey, Integer, Numeric
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.orm import relationship
Expand Down Expand Up @@ -40,6 +42,7 @@ class IncidentCostUpdate(IncidentCostBase):
class IncidentCostRead(IncidentCostBase):
id: PrimaryKey
incident_cost_type: IncidentCostTypeRead
updated_at: Optional[datetime] = None


class IncidentCostPagination(Pagination):
Expand Down
45 changes: 41 additions & 4 deletions src/dispatch/plugins/dispatch_google/docs/plugin.py
Original file line number Diff line number Diff line change
Expand Up @@ -195,7 +195,9 @@ def insert(self, document_id: str, request) -> bool | None:
log.exception(e)
return False

def get_table_details(self, document_id: str, header: str) -> tuple[bool, int, int, list[int]]:
def get_table_details(
self, document_id: str, header: str, doc_name: str
) -> tuple[bool, int, int, list[int]]:
client = get_service(self.configuration, "docs", "v1", self.scopes).documents()
try:
document_content = (
Expand All @@ -220,9 +222,6 @@ def get_table_details(self, document_id: str, header: str) -> tuple[bool, int, i

elif header_section:
# Gets the end index of any text below the header
if header_section and item["textRun"]["content"].strip():
header_index = item["endIndex"]
# checking if we are past header in question
if (
any(
"headingId" in style
Expand All @@ -232,9 +231,42 @@ def get_table_details(self, document_id: str, header: str) -> tuple[bool, int, i
and element["paragraph"].get("paragraphStyle")["headingId"]
!= headingId
):
if header_index == element["startIndex"]:
requests = [
{
"insertText": {
"location": {
"index": header_index,
},
"text": "\n",
}
},
{
"updateParagraphStyle": {
"range": {
"startIndex": header_index,
"endIndex": header_index,
},
"paragraphStyle": {
"namedStyleType": "NORMAL_TEXT",
},
"fields": "namedStyleType",
}
},
]
if GoogleDocsDocumentPlugin.insert(
self, document_id=document_id, request=requests
):
header_index = header_index
past_header = True
header_section = False
break

if header_section and item["textRun"]["content"].strip():
header_index = item["endIndex"]

# checking if we are past header in question

# Checking for table under the header
elif header_section and "table" in element and not past_header:
table_exists = True
Expand All @@ -251,6 +283,11 @@ def get_table_details(self, document_id: str, header: str) -> tuple[bool, int, i
except Exception as e:
log.exception(e)
return table_exists, header_index, -1, table_indices
if header_index == 0:
log.error(
f"Could not find Timeline header in the {doc_name} document with id {document_id}"
)
raise Exception(f"Timeline header does not exist in the {doc_name} document")
return table_exists, header_index, -1, table_indices

def delete_table(self, document_id: str, request) -> bool | None:
Expand Down
Loading

0 comments on commit 90204e2

Please sign in to comment.