refactor: generalize JSON mapping for broader use
Ninad1306 committed Jun 21, 2024
1 parent ea22e8f commit fddcc72
Showing 5 changed files with 46 additions and 193 deletions.
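The refactor folds the module-level helpers in gstr_1_json_map.py (convert_to_internal_data_format, get_category_wise_data, convert_to_gov_data_format and summarize_retsum_data) into a GSTR1DataMapper class built on the shared GSTRDataMapper base from gstr_mapper_utils, and updates the call sites in gstr_1_export.py, gstr_1_log.py, gstr_1_download.py and the tests accordingly. The call-site change, sketched with gstr_1_download.py as the example (json_data below is a placeholder value, not from this commit):

# Before: module-level helper
# from india_compliance.gst_india.utils.gstr_1.gstr_1_json_map import (
#     convert_to_internal_data_format,
# )
# mapped_data = convert_to_internal_data_format(json_data)

# After: method on the new class
from india_compliance.gst_india.utils.gstr_1.gstr_1_json_map import GSTR1DataMapper

json_data = {}  # placeholder for the Gov JSON payload
mapped_data = GSTR1DataMapper().convert_to_internal_data_format(json_data)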
11 changes: 4 additions & 7 deletions india_compliance/gst_india/doctype/gstr_1_beta/gstr_1_export.py
@@ -21,10 +21,7 @@
    GSTR1_ItemField,
    GSTR1_SubCategory,
)
from india_compliance.gst_india.utils.gstr_1.gstr_1_json_map import (
    convert_to_gov_data_format,
    get_category_wise_data,
)
from india_compliance.gst_india.utils.gstr_1.gstr_1_json_map import GSTR1DataMapper


class ExcelWidth(Enum):
@@ -58,7 +55,7 @@ def process_data(self, input_data):
        2. Format/Transform the data to match the Gov Excel format
        """

        category_wise_data = get_category_wise_data(input_data)
        category_wise_data = GSTR1DataMapper().get_category_wise_data(input_data)
        processed_data = {}

        for category, data in category_wise_data.items():
@@ -1141,7 +1138,7 @@ def __init__(self, company_gstin, month_or_quarter, year):

        self.summary = gstr1_log.load_data("reconcile_summary")["reconcile_summary"]
        data = gstr1_log.load_data("reconcile")["reconcile"]
        self.data = get_category_wise_data(data)
        self.data = GSTR1DataMapper().get_category_wise_data(data)

    def export_data(self):
        excel = ExcelExporter()
@@ -2080,7 +2077,7 @@ def download_gstr_1_json(
        "data": {
            "gstin": company_gstin,
            "fp": period,
            **convert_to_gov_data_format(data, company_gstin),
            **GSTR1DataMapper().convert_to_gov_data_format(data, company_gstin),
        },
        "filename": f"GSTR-1-Gov-{company_gstin}-{period}.json",
    }
4 changes: 2 additions & 2 deletions india_compliance/gst_india/doctype/gstr_1_log/gstr_1_log.py
@@ -22,7 +22,7 @@
)
from india_compliance.gst_india.utils.gstr_1.gstr_1_json_map import (
    GSTR1BooksData,
    summarize_retsum_data,
    GSTR1DataMapper,
)
from india_compliance.gst_india.utils.gstr_utils import request_otp

@@ -41,7 +41,7 @@ def get_summarized_data(self, data, is_filed=False):
        Helper function to summarize data for each sub-category
        """
        if is_filed and data.get("summary"):
            return summarize_retsum_data(data.get("summary"))
            return GSTR1DataMapper().summarize_retsum_data(data.get("summary"))

        subcategory_summary = self.get_subcategory_summary(data)

8 changes: 3 additions & 5 deletions india_compliance/gst_india/utils/gstr_1/gstr_1_download.py
@@ -6,9 +6,7 @@
from india_compliance.gst_india.doctype.gstr_import_log.gstr_import_log import (
    create_import_log,
)
from india_compliance.gst_india.utils.gstr_1.gstr_1_json_map import (
    convert_to_internal_data_format,
)
from india_compliance.gst_india.utils.gstr_1.gstr_1_json_map import GSTR1DataMapper

UNFILED_ACTIONS = [
    "B2B",
@@ -77,7 +75,7 @@ def download_gstr1_json_data(gstr1_log):

        json_data.update(response)

    mapped_data = convert_to_internal_data_format(json_data)
    mapped_data = GSTR1DataMapper().convert_to_internal_data_format(json_data)
    gstr1_log.update_json_for(data_field, mapped_data, reset_reconcile=True)

    if is_queued:
@@ -109,7 +107,7 @@ def save_gstr_1(gstin, return_period, json_data, return_type):
            title=_("Invalid Response Received."),
        )

    mapped_data = convert_to_internal_data_format(json_data)
    mapped_data = GSTR1DataMapper().convert_to_internal_data_format(json_data)

    gstr1_log = frappe.get_doc("GSTR-1 Log", f"{return_period}-{gstin}")
    gstr1_log.update_json_for(data_field, mapped_data, overwrite=False)
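Both download entry points above now go through the same object that the export and log code paths use. A usage sketch of the full round trip; the json_data and company_gstin values below are placeholders, not from this commit:

from india_compliance.gst_india.utils.gstr_1.gstr_1_json_map import GSTR1DataMapper

json_data = {}                     # placeholder: Gov JSON as downloaded from the portal
company_gstin = "24AAACG1234A1Z5"  # placeholder GSTIN

mapper = GSTR1DataMapper()

# Gov JSON -> internal, sub-category wise format (used when saving downloaded data)
internal_data = mapper.convert_to_internal_data_format(json_data)

# internal format -> category wise rows (used by the Excel export)
category_wise_data = mapper.get_category_wise_data(internal_data)

# internal format -> Gov JSON payload (used when exporting the filing JSON)
gov_payload = mapper.convert_to_gov_data_format(internal_data, company_gstin)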
207 changes: 33 additions & 174 deletions india_compliance/gst_india/utils/gstr_1/gstr_1_json_map.py
@@ -8,8 +8,10 @@
    GSTR1DocumentIssuedSummary,
    GSTR11A11BData,
)
from india_compliance.gst_india.utils import get_gst_accounts_by_type
from india_compliance.gst_india.utils.__init__ import get_party_for_gstin
from india_compliance.gst_india.utils import (
    get_gst_accounts_by_type,
    get_party_for_gstin,
)
from india_compliance.gst_india.utils.gstr_1 import (
    CATEGORY_SUB_CATEGORY_MAPPING,
    SUB_CATEGORY_GOV_CATEGORY_MAPPING,
@@ -24,6 +26,7 @@
    GSTR1_SubCategory,
)
from india_compliance.gst_india.utils.gstr_1.gstr_1_data import GSTR1Invoices
from india_compliance.gst_india.utils.gstr_mapper_utils import GSTRDataMapper

############################################################################################################
### Map Govt JSON to Internal Data Structure ###############################################################
@@ -1834,179 +1837,35 @@ def map_document_types(self, doc_type, *args):
        return self.SECTION_NAMES.get(doc_type, doc_type)


CLASS_MAP = {
    GovJsonKey.B2B.value: B2B,
    GovJsonKey.B2CL.value: B2CL,
    GovJsonKey.EXP.value: Exports,
    GovJsonKey.B2CS.value: B2CS,
    GovJsonKey.NIL_EXEMPT.value: NilRated,
    GovJsonKey.CDNR.value: CDNR,
    GovJsonKey.CDNUR.value: CDNUR,
    GovJsonKey.HSN.value: HSNSUM,
    GovJsonKey.DOC_ISSUE.value: DOC_ISSUE,
    GovJsonKey.AT.value: AT,
    GovJsonKey.TXP.value: TXPD,
    GovJsonKey.SUPECOM.value: SUPECOM,
    GovJsonKey.RET_SUM.value: RETSUM,
}


def convert_to_internal_data_format(gov_data):
    """
    Converts Gov data format to internal data format for all categories
    """
    output = {}

    for category, mapper_class in CLASS_MAP.items():
        if not gov_data.get(category):
            continue

        output.update(
            mapper_class().convert_to_internal_data_format(gov_data.get(category))
        )

    return output


def get_category_wise_data(
    subcategory_wise_data: dict,
    mapping: dict = SUB_CATEGORY_GOV_CATEGORY_MAPPING,
) -> dict:
    """
    returns category wise data from subcategory wise data
    Args:
        subcategory_wise_data (dict): subcategory wise data
        mapping (dict): subcategory to category mapping
        with_subcategory (bool): include subcategory level data
    Returns:
        dict: category wise data
    Example (with_subcategory=True):
        {
            "B2B, SEZ, DE": {
                "B2B": data,
                ...
            }
            ...
        }
    Example (with_subcategory=False):
        {
            "B2B, SEZ, DE": data,
            ...
        }
    """
    category_wise_data = {}
    for subcategory, category in mapping.items():
        if not subcategory_wise_data.get(subcategory.value):
            continue

        category_wise_data.setdefault(category.value, []).extend(
            subcategory_wise_data.get(subcategory.value, [])
        )

    return category_wise_data


def convert_to_gov_data_format(internal_data: dict, company_gstin: str) -> dict:
    """
    converts internal data format to Gov data format for all categories
    """

    category_wise_data = get_category_wise_data(internal_data)

    output = {}
    for category, mapper_class in CLASS_MAP.items():
        if not category_wise_data.get(category):
            continue

        output[category] = mapper_class().convert_to_gov_data_format(
            category_wise_data.get(category), company_gstin=company_gstin
        )

    return output


def summarize_retsum_data(input_data):
    if not input_data:
        return []

    summarized_data = []
    total_values_keys = [
        "total_igst_amount",
        "total_cgst_amount",
        "total_sgst_amount",
        "total_cess_amount",
        "total_taxable_value",
    ]
    amended_data = {key: 0 for key in total_values_keys}

    input_data = {row.get("description"): row for row in input_data}

    def _sum(row):
        return flt(sum([row.get(key, 0) for key in total_values_keys]), 2)

    for category, sub_categories in CATEGORY_SUB_CATEGORY_MAPPING.items():
        category = category.value
        if category not in input_data:
            continue

        # compute total liability and total amended data
        amended_category_data = input_data.get(f"{category} (Amended)", {})
        for key in total_values_keys:
            amended_data[key] += amended_category_data.get(key, 0)

        # add category data
        if _sum(input_data[category]) == 0:
            continue

        summarized_data.append({**input_data.get(category), "indent": 0})

        # add subcategory data
        for sub_category in sub_categories:
            sub_category = sub_category.value
            if sub_category not in input_data:
                continue

            if _sum(input_data[sub_category]) == 0:
                continue

            summarized_data.append(
                {
                    **input_data.get(sub_category),
                    "indent": 1,
                    "consider_in_total_taxable_value": (
                        False
                        if sub_category
                        in SUBCATEGORIES_NOT_CONSIDERED_IN_TOTAL_TAXABLE_VALUE
                        else True
                    ),
                    "consider_in_total_tax": (
                        False
                        if sub_category in SUBCATEGORIES_NOT_CONSIDERED_IN_TOTAL_TAX
                        else True
                    ),
                }
            )

    # add total amendment liability
    if _sum(amended_data) != 0:
        summarized_data.extend(
            [
                {
                    "description": "Net Liability from Amendments",
                    **amended_data,
                    "indent": 0,
                    "consider_in_total_taxable_value": True,
                    "consider_in_total_tax": True,
                    "no_of_records": 0,
                }
            ]
        )

    return summarized_data


class GSTR1DataMapper(GSTRDataMapper):
    CLASS_MAP = {
        GovJsonKey.B2B.value: B2B,
        GovJsonKey.B2CL.value: B2CL,
        GovJsonKey.EXP.value: Exports,
        GovJsonKey.B2CS.value: B2CS,
        GovJsonKey.NIL_EXEMPT.value: NilRated,
        GovJsonKey.CDNR.value: CDNR,
        GovJsonKey.CDNUR.value: CDNUR,
        GovJsonKey.HSN.value: HSNSUM,
        GovJsonKey.DOC_ISSUE.value: DOC_ISSUE,
        GovJsonKey.AT.value: AT,
        GovJsonKey.TXP.value: TXPD,
        GovJsonKey.SUPECOM.value: SUPECOM,
        GovJsonKey.RET_SUM.value: RETSUM,
    }

    category_sub_category_mapping = CATEGORY_SUB_CATEGORY_MAPPING
    subcategories_not_considered_in_total_tax = (
        SUBCATEGORIES_NOT_CONSIDERED_IN_TOTAL_TAX
    )
    subcategories_not_considered_in_total_taxable_value = (
        SUBCATEGORIES_NOT_CONSIDERED_IN_TOTAL_TAXABLE_VALUE
    )
    mapping = SUB_CATEGORY_GOV_CATEGORY_MAPPING

    def convert_to_gov_data_format(self, internal_data, company_gstin):
        category_wise_data = self.get_category_wise_data(internal_data)
        return super().convert_to_gov_data_format(category_wise_data, company_gstin)


####################################################################################################
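The new class wires GSTR-1 specific maps into a shared GSTRDataMapper base imported from gstr_mapper_utils, which is not part of this diff. A minimal sketch of the contract that base presumably provides, reconstructed from the module-level helpers removed above; the method bodies here are an assumption, not code from this commit:

class GSTRDataMapper:
    # Sketch of the assumed base class; the real implementation lives in
    # india_compliance/gst_india/utils/gstr_mapper_utils.py.
    CLASS_MAP = {}  # Gov JSON key -> mapper class, supplied by subclasses
    mapping = {}    # sub-category -> Gov category mapping, supplied by subclasses

    def convert_to_internal_data_format(self, gov_data):
        # Delegate each Gov category to its mapper class and merge the results
        output = {}
        for category, mapper_class in self.CLASS_MAP.items():
            if not gov_data.get(category):
                continue

            output.update(
                mapper_class().convert_to_internal_data_format(gov_data.get(category))
            )

        return output

    def get_category_wise_data(self, subcategory_wise_data):
        # Regroup sub-category wise rows under their Gov category
        category_wise_data = {}
        for subcategory, category in self.mapping.items():
            if not subcategory_wise_data.get(subcategory.value):
                continue

            category_wise_data.setdefault(category.value, []).extend(
                subcategory_wise_data.get(subcategory.value, [])
            )

        return category_wise_data

    def convert_to_gov_data_format(self, category_wise_data, company_gstin):
        # Inverse direction: build the Gov payload category by category
        output = {}
        for category, mapper_class in self.CLASS_MAP.items():
            if not category_wise_data.get(category):
                continue

            output[category] = mapper_class().convert_to_gov_data_format(
                category_wise_data.get(category), company_gstin=company_gstin
            )

        return output

    # summarize_retsum_data is also expected on the base (it is called via
    # GSTR1DataMapper in gstr_1_log.py); its logic matches the removed helper above.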
@@ -5,7 +5,6 @@
from india_compliance.gst_india.doctype.gstr_1_log.gstr_1_log import GenerateGSTR1
from india_compliance.gst_india.utils import get_party_for_gstin as _get_party_for_gstin
from india_compliance.gst_india.utils.gstr_1 import (
    SUB_CATEGORY_GOV_CATEGORY_MAPPING,
    GovDataField,
    GSTR1_B2B_InvoiceType,
    GSTR1_DataField,
@@ -24,8 +23,8 @@
    SUPECOM,
    TXPD,
    Exports,
    GSTR1DataMapper,
    NilRated,
    get_category_wise_data,
)


@@ -39,9 +38,9 @@ def normalize_data(data):

def process_mapped_data(data):
    return list(
        get_category_wise_data(
            normalize_data(copy.deepcopy(data)), SUB_CATEGORY_GOV_CATEGORY_MAPPING
        ).values()
        GSTR1DataMapper()
        .get_category_wise_data(normalize_data(copy.deepcopy(data)))
        .values()
    )[0]


Expand Down
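Since the per-return configuration (CLASS_MAP, mapping, and the category constants) now lives in class attributes, the same conversion machinery can be reused for other data sets by subclassing the base. A hypothetical illustration, not part of this commit; the subclass name and maps are placeholders:

from india_compliance.gst_india.utils.gstr_mapper_utils import GSTRDataMapper


class SomeOtherReturnDataMapper(GSTRDataMapper):
    # Placeholder maps: a real subclass would point these at its own
    # Gov JSON keys, mapper classes and sub-category mapping.
    CLASS_MAP = {}
    mapping = {}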
