diff --git a/utilities/project-factory/LOCALSETUP.md b/utilities/project-factory/LOCALSETUP.md
new file mode 100644
index 00000000000..3417b07ccd7
--- /dev/null
+++ b/utilities/project-factory/LOCALSETUP.md
@@ -0,0 +1,50 @@
+# Local Setup
+
+To set up the ProjectFactory service on your local system, clone the [Digit Frontend repository](https://github.com/egovernments/DIGIT-Frontend).
+
+## Dependencies
+
+### Infra Dependency
+
+- [x] Postgres DB
+- [ ] Redis
+- [ ] Elasticsearch
+- [x] Kafka
+  - [x] Consumer
+  - [x] Producer
+
+## Running Locally
+
+### Local setup
+1. Clone the [Digit Frontend repository](https://github.com/egovernments/DIGIT-Frontend) if you have not already done so.
+2. Install Node.js version 20 using nvm (Node Version Manager).
+3. Update the configs in [utilities/project-factory/src/server/config/index.ts](utilities/project-factory/src/server/config/index.ts): change HOST to "http://localhost:8080/" and KAFKA_BROKER_HOST to "localhost:9092".
+4. Update the DB config values to match your local Postgres setup (see the example snippet after the commands below).
+5. Point each dependency service host either at a unified environment (unified-env) or at a locally port-forwarded service.
+6. Open a terminal and run the following commands:
+
+   `cd utilities/project-factory/`
+
+   `yarn install` (run this command only once, after cloning the repo)
+
+   `yarn dev`
+
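+For reference, after steps 3 and 4 the edited values in `index.ts` might look roughly like the sketch below. The `localOverrides` wrapper is only for illustration — edit the existing keys in place — and the DB_CONFIG fallbacks shown are the service defaults, so override whichever values differ on your machine.
+
+```typescript
+// Illustrative local values — a sketch, not code to paste wholesale.
+// Edit the matching keys inside utilities/project-factory/src/server/config/index.ts.
+const localOverrides = {
+    HOST: "http://localhost:8080/",
+    KAFKA_BROKER_HOST: "localhost:9092",
+    DB_CONFIG: {
+        DB_USER: process.env.DB_USER || "postgres",
+        DB_HOST: process.env.DB_HOST?.split(':')[0] || "localhost",
+        DB_NAME: process.env.DB_NAME || "postgres",
+        DB_PASSWORD: process.env.DB_PASSWORD || "postgres",
+        DB_PORT: process.env.DB_PORT || "5432",
+    },
+};
+```
+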
"localhost", + DB_NAME: process.env.DB_NAME || "postgres", + DB_PASSWORD: process.env.DB_PASSWORD || "postgres", + DB_PORT: process.env.DB_PORT || "5432", + }, // Application configuration app: { port: parseInt(process.env.APP_PORT || "8080") || 8080, @@ -107,25 +108,19 @@ const config = { localizationSearch: process.env.EGOV_LOCALIZATION_SEARCH || "localization/messages/v1/_search", localizationCreate: "localization/messages/v1/_upsert", projectTypeSearch: "project-factory/v1/project-type/search", - boundaryRelationshipCreate:"boundary-service/boundary-relationships/_create" + boundaryRelationshipCreate: "boundary-service/boundary-relationships/_create" }, // Values configuration values: { userMainBoundary: "mz", userMainBoundaryType: "Country", - parsingTemplate: "HCM.ParsingTemplate", - transfromTemplate: "HCM.TransformTemplate", - campaignType: "HCM.HCMTemplate", - APIResource: "HCM.APIResourceTemplate3", idgen: { format: process.env.CMP_IDGEN_FORMAT || "CMP-[cy:yyyy-MM-dd]-[SEQ_EG_CMP_ID]", idName: process.env.CMP_IDGEN_IDNAME || "campaign.number" }, matchFacilityData: false, retryCount: process.env.CREATE_RESOURCE_RETRY_COUNT || "3" - }, - // Default search template - SEARCH_TEMPLATE: "HCM.APIResourceTemplate3" + } }; // Exporting getErrorCodes function and config object export { getErrorCodes }; diff --git a/utilities/project-factory/src/server/utils/genericUtils.ts b/utilities/project-factory/src/server/utils/genericUtils.ts index 54da9fd6532..1c77fbad533 100644 --- a/utilities/project-factory/src/server/utils/genericUtils.ts +++ b/utilities/project-factory/src/server/utils/genericUtils.ts @@ -4,7 +4,7 @@ import config, { getErrorCodes } from "../config/index"; import { v4 as uuidv4 } from 'uuid'; import { produceModifiedMessages } from "../kafka/Listener"; import { generateHierarchyList, getAllFacilities, getHierarchy } from "../api/campaignApis"; -import { searchMDMS, getCount, getBoundarySheetData, getSheetData, createAndUploadFile, createExcelSheet, getTargetSheetData, callMdmsData } from "../api/genericApis"; +import { getBoundarySheetData, getSheetData, createAndUploadFile, createExcelSheet, getTargetSheetData, callMdmsData } from "../api/genericApis"; import * as XLSX from 'xlsx'; import FormData from 'form-data'; import { logger } from "./logger"; @@ -17,7 +17,6 @@ import { getLocaleFromRequest, getLocalisationModuleName } from "./localisationU import { getBoundaryColumnName, getBoundaryTabName } from "./boundaryUtils"; import { getBoundaryDataService } from "../service/dataManageService"; const NodeCache = require("node-cache"); -const _ = require('lodash'); const updateGeneratedResourceTopic = config.KAFKA_UPDATE_GENERATED_RESOURCE_DETAILS_TOPIC; const createGeneratedResourceTopic = config.KAFKA_CREATE_GENERATED_RESOURCE_DETAILS_TOPIC; @@ -366,67 +365,6 @@ async function getFinalUpdatedResponse(result: any, responseData: any, request: }); } -async function callSearchApi(request: any, response: any) { - try { - let result: any; - const { type } = request.query; - result = await searchMDMS([type], config.SEARCH_TEMPLATE, request.body.RequestInfo, response); - const filter = request?.body?.Filters; - const requestBody = { "RequestInfo": request?.body?.RequestInfo, filter }; - const responseData = result?.mdms?.[0]?.data; - if (!responseData || responseData.length === 0) { - return errorResponder({ message: "Invalid ApiResource Type. 
Check Logs" }, request, response); - } - const host = responseData?.host; - const url = responseData?.searchConfig?.url; - var queryParams: any = {}; - for (const searchItem of responseData?.searchConfig?.searchBody) { - if (searchItem.isInParams) { - queryParams[searchItem.path] = searchItem.value; - } - else if (searchItem.isInBody) { - _.set(requestBody, `${searchItem.path}`, searchItem.value); - } - } - const countknown = responseData?.searchConfig?.isCountGiven === true; - let responseDatas: any[] = []; - const searchPath = responseData?.searchConfig?.keyName; - let fetchedData: any; - let responseObject: any; - - if (countknown) { - const count = await getCount(responseData, request, response); - let noOfTimesToFetchApi = Math.ceil(count / queryParams.limit); - for (let i = 0; i < noOfTimesToFetchApi; i++) { - responseObject = await httpRequest(host + url, requestBody, queryParams, undefined, undefined, undefined); - fetchedData = _.get(responseObject, searchPath); - fetchedData.forEach((item: any) => { - responseDatas.push(item); - }); - queryParams.offset = (parseInt(queryParams.offset) + parseInt(queryParams.limit)).toString(); - } - } - - else { - while (true) { - responseObject = await httpRequest(host + url, requestBody, queryParams, undefined, undefined, undefined); - fetchedData = _.get(responseObject, searchPath); - fetchedData.forEach((item: any) => { - responseDatas.push(item); - }); - queryParams.offset = (parseInt(queryParams.offset) + parseInt(queryParams.limit)).toString(); - if (fetchedData.length < parseInt(queryParams.limit)) { - break; - } - } - } - return responseDatas; - } - catch (e: any) { - logger.error(String(e)) - return errorResponder({ message: String(e) + " Check Logs" }, request, response); - } -} async function fullProcessFlowForNewEntry(newEntryResponse: any, generatedResource: any, request: any) { @@ -1003,7 +941,6 @@ export { generateAuditDetails, generateActivityMessage, getResponseFromDb, - callSearchApi, getModifiedResponse, getNewEntryResponse, getOldEntryResponse,