Commit
Merge pull request #33 from KathreftisAI/qa-release
Qa release
unileshyadav authored May 16, 2024
2 parents 965dddf + 744ed95 commit 861090e
Showing 3 changed files with 81 additions and 42 deletions.
4 changes: 2 additions & 2 deletions Dockerfile
@@ -43,7 +43,7 @@ EXPOSE 5000
ENV NAME World


CMD ["gunicorn","-k","uvicorn.workers.UvicornWorker", "-w", "2", "--threads", "8","--timeout","300000", "-b", "0.0.0.0:5000", "modal2:app"]
CMD ["gunicorn","-k","uvicorn.workers.UvicornWorker", "-w", "4", "--threads", "16","--timeout","300000", "-b", "0.0.0.0:5000", "modal2:app"]

# Run app.py when the container launches
#CMD ["uvicorn", "modal2:app", "--host", "0.0.0.0", "--port", "5000"]
#CMD ["uvicorn", "modal2:app", "--host", "0.0.0.0", "--port", "5000"]
69 changes: 45 additions & 24 deletions database/watcher/watcher.py
@@ -10,6 +10,8 @@
import pymongo
from database.connection import get_collection
from timeout_decorator import timeout
from concurrent.futures import ThreadPoolExecutor, as_completed
import concurrent.futures


# Global variables
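
This hunk adds the ThreadPoolExecutor and as_completed imports, but the code that uses them is not visible in the part of the diff shown here. Purely as a sketch of how the per-login getBreakCount lookups further down could be fanned out across threads (the helper name, worker count, and shape are assumptions, not the repository's actual code):

def collect_break_counts(tenant, reconciliationId, logins, max_workers=8):
    # Fan the per-login break-count lookups out over a thread pool and gather
    # results as they complete. getBreakCount and logging come from the
    # surrounding module; everything else here is illustrative.
    counts = {}
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        futures = {
            executor.submit(getBreakCount, tenant, reconciliationId, login): login
            for login in logins
        }
        for future in as_completed(futures):
            login = futures[future]
            try:
                counts[login] = future.result()
            except Exception as exc:
                logging.error(f"getBreakCount failed for {login}: {exc}")
    return counts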
@@ -359,33 +361,52 @@ def execute_final_code(tenant, reconciliationId):
#only_present_in_Cymmetri = full_set_of_cymmetri_logins - full_set_of_target_logins

#print(full_set_of_cymmetri_logins)

for login in full_set_of_cymmetri_logins:
#print(f'{login} only found in cymmetri')
login_break_count = getBreakCount(tenant, reconciliationId,login)
record = {
"reconciliationId": reconciliationId,
"AppName": appName,
"appId": appId,
"cymmetri_login": login,
"break_type": "present_in_Cymmetri_only",
"performedAt": datetime.now(),
"reconReportMetadataId": rrMetaId,
"login":login,
"break_count": login_break_count
}
#shReconBreakRecords.insert_one(record)
# for login in full_set_of_cymmetri_logins:
# login_break_count = getBreakCount(tenant, reconciliationId,login)
# record = {
# "reconciliationId": reconciliationId,
# "AppName": appName,
# "appId": appId,
# "cymmetri_login": login,
# "break_type": "present_in_Cymmetri_only",
# "performedAt": datetime.now(),
# "reconReportMetadataId": rrMetaId,
# "login":login,
# "break_count": login_break_count
# }
# login_data.append(record)
# logging.debug("data appneded succesfully")

batch_size = 500 # Adjust this value based on your requirements
num_batches = (len(full_set_of_cymmetri_logins) + batch_size - 1) // batch_size

for i in range(num_batches):
batch_logins = full_set_of_cymmetri_logins[i * batch_size : (i + 1) * batch_size]
batch_data = []

for login in batch_logins:
login_break_count = getBreakCount(tenant, reconciliationId, login)
record = {
"reconciliationId": reconciliationId,
"AppName": appName,
"appId": appId,
"cymmetri_login": login,
"break_type": "present_in_Cymmetri_only",
"performedAt": datetime.now(),
"reconReportMetadataId": rrMetaId,
"login": login,
"break_count": login_break_count
}
batch_data.append(record)

login_data.append(record)
try:
shReconBreakRecords.insert_many(login_data)
logging.debug("Data Inserted into shReconBreakRecords succesfully.")
except Exception as e:
logging.error(f"Failed to insert data into shReconBreakRecords collection: {e}")
try:
shReconBreakRecords.insert_many(batch_data)
logging.debug(f"Batch {i+1}/{num_batches} inserted successfully.")
except Exception as e:
logging.debug(f"Failed to insert batch {i+1}/{num_batches}: {e}")





break_types = ["present_in_target_only", "present_in_Cymmetri_only", "app_overdue"]

# Initialize a dictionary to store the counts
50 changes: 34 additions & 16 deletions modal2.py
@@ -227,6 +227,12 @@ async def get_matching_records(request: MatchingRecordRequestSchema, tenant: str
# Log the start of the operation with tenant information
logging.info("matching_record API started")
logging.info(f"Starting 'get_matching_records' for tenant: {tenant} at {datetime.datetime.now()}")


if not request.appId:
logging.error("Missing appID in the payload")
raise HTTPException(status_code=400,detail="Missing appId",headers={"errorCode":"missing appID"})


try:
# Initialize the collection references
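
The new guard at the top of get_matching_records rejects requests that omit appId with an HTTP 400 and an errorCode response header. A hypothetical client call against it might look like the following (the route path, tenant value, and port are assumptions; only the status code, header, and detail text come from the diff):

import requests

resp = requests.post(
    "http://localhost:5000/get_matching_records",  # assumed route path
    json={},                                       # payload with no appId
    headers={"tenant": "demo-tenant"},             # tenant header per the signature
)
print(resp.status_code)               # 400
print(resp.headers.get("errorCode"))  # "missing appID"
print(resp.json())                    # {"detail": "Missing appId"}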
@@ -248,14 +254,16 @@ async def get_matching_records(request: MatchingRecordRequestSchema, tenant: str
count = doc['count'] # Extract the total count of unique documents
logging.debug(f"Total records found: {count}")

if not count:
response_val = {
"data": None,
"success": False,
"errorCode": "DATA_MISSING_ERROR",
"message": "Missing data for requested appId"
}
return create_bad_request_response(response_val)


# if not count:
# response_val = {
# "data": None,
# "success": False,
# "errorCode": "DATA_MISSING_ERROR",
# "message": "Data is not available for this appID"
# }
# return response_val

# Step 3: Define the pipeline to retrieve the latest document per reconciliationId and paginate the results
pipeline = [
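
The diff cuts off right after pipeline = [, so the aggregation itself is not visible here. Based only on the comment above it ("retrieve the latest document per reconciliationId and paginate the results"), a pipeline of that shape commonly looks like the sketch below; the field names and pagination variables are assumptions, not the repository's code:

pipeline = [
    {"$sort": {"performedAt": -1}},            # newest documents first (field name assumed)
    {"$group": {                               # keep one document per reconciliationId
        "_id": "$reconciliationId",
        "latest": {"$first": "$$ROOT"},
    }},
    {"$replaceRoot": {"newRoot": "$latest"}},
    {"$skip": (page_number - 1) * page_size},  # pagination parameters assumed
    {"$limit": page_size},
]
latest_docs = list(collection.aggregate(pipeline))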
@@ -361,7 +369,9 @@ async def populating_data(request: ModalRequestSchema, tenant: str = Header(...)
return_val['break_info'] = break_info

logging.debug(f"Return value prepared: {return_val}")
return return_val
#return return_val
logging.debug(f"this is my data: {return_val}")
return ResponseModel(return_val, message="modal api executed successfully")

else:
cymmetri_login = breakReconRecord['cymmetri_login']
@@ -391,8 +401,9 @@ async def populating_data(request: ModalRequestSchema, tenant: str = Header(...)
}
}
logging.debug(f"Return value prepared: {return_val}")
data_response = return_val
return ResponseModel(data=data_response, message="Policy mapping generated successfully")
return_val
logging.debug(f"this is my data: {return_val}")
return ResponseModel(return_val, message="modal executed successfully")

except Exception as e:
logging.error(f"An error occurred: {str(e)}")
@@ -561,11 +572,18 @@ async def start_recon(request: StartReconRequestSchema, fullRequest: Request) ->
logging.debug("Existing job count for reconciliationId %s: %d", reconciliationId, count)

if count > 0:
response_val = {}
response_val['success'] = 'false'
response_val['error'] = 'JOB_RUNNING'
response_val['errorMessage'] = '360 degree reconciliation is already running for given reconciliationId'
raise HTTPException(status_code=400, detail=response_val)
response_val = {
"data": None,
"success": False,
"errorCode": "RECONCILATIONALREADYRUNNING",
"message": "360 degree reconciliation is already running for given reconciliationId"
}
return create_bad_request_response(response_val)
# response_val = {}
# response_val['success'] = 'false'
# response_val['error'] = 'JOB_RUNNING'
# response_val['errorMessage'] = '360 degree reconciliation is already running for given reconciliationId'
# raise HTTPException(status_code=400, detail=response_val)

# Call to external service
baseUrl_for_java_srvc = data['RECON_PROV_SRVC']
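Both new error paths in this file return create_bad_request_response(response_val), but the helper itself is not part of this diff. A minimal guess at its shape, assuming it simply wraps the payload in an HTTP 400 JSON response:

from fastapi.responses import JSONResponse

def create_bad_request_response(payload: dict) -> JSONResponse:
    # Assumed implementation: return the structured error payload with a 400
    # status so callers receive {"data": None, "success": False, ...} verbatim.
    return JSONResponse(status_code=400, content=payload)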
