
Commit 337cd25

Update log_struct method to add severity
1 parent 107f065 · commit 337cd25

2 files changed: +15 −15 lines


backend/score.py

+12 −12
@@ -115,7 +115,7 @@ async def create_source_knowledge_graph_url(

        message = f"Source Node created successfully for source type: {source_type} and source: {source}"
        json_obj = {'api_name':'url_scan','db_url':uri,'url_scanned_file':lst_file_name, 'source_url':source_url, 'wiki_query':wiki_query, 'logging_time': formatted_time(datetime.now(timezone.utc))}
-       logger.log_struct(json_obj)
+       logger.log_struct(json_obj, "INFO")
        return create_api_response("Success",message=message,success_count=success_count,failed_count=failed_count,file_name=lst_file_name)
    except Exception as e:
        error_message = str(e)
@@ -203,7 +203,7 @@ async def extract_knowledge_graph_from_file(
        result['wiki_query'] = wiki_query
        result['source_type'] = source_type
        result['logging_time'] = formatted_time(datetime.now(timezone.utc))
-       logger.log_struct({"severity":"INFO","jsonPayload":result})
+       logger.log_struct(result, "INFO")
        extract_api_time = time.time() - start_time
        logging.info(f"extraction completed in {extract_api_time:.2f} seconds for file name {file_name}")
        return create_api_response('Success', data=result, file_source= source_type)
@@ -222,7 +222,7 @@ async def extract_knowledge_graph_from_file(
        logging.info(f'Deleted File Path: {merged_file_path} and Deleted File Name : {file_name}')
        delete_uploaded_local_file(merged_file_path,file_name)
        json_obj = {'message':message,'error_message':error_message, 'file_name': file_name,'status':'Failed','db_url':uri,'failed_count':1, 'source_type': source_type, 'source_url':source_url, 'wiki_query':wiki_query, 'logging_time': formatted_time(datetime.now(timezone.utc))}
-       logger.log_struct({"severity":"ERROR","jsonPayload":json_obj})
+       logger.log_struct(json_obj, "ERROR")
        logging.exception(f'File Failed in extraction: {json_obj}')
        return create_api_response('Failed', message=message + error_message[:100], error=error_message, file_name = file_name)
    finally:
@@ -239,7 +239,7 @@ async def get_source_list(uri:str, userName:str, password:str, database:str=None
        uri = uri.replace(" ","+")
        result = await asyncio.to_thread(get_source_list_from_graph,uri,userName,decoded_password,database)
        json_obj = {'api_name':'sources_list','db_url':uri, 'logging_time': formatted_time(datetime.now(timezone.utc))}
-       logger.log_struct({"severity":"INFO","jsonPayload":json_obj})
+       logger.log_struct(json_obj, "INFO")
        return create_api_response("Success",data=result)
    except Exception as e:
        job_status = "Failed"
@@ -269,7 +269,7 @@ async def post_processing(uri=Form(), userName=Form(), password=Form(), database
        json_obj = {'api_name': 'post_processing/create_entity_embedding', 'db_url': uri, 'logging_time': formatted_time(datetime.now(timezone.utc))}
        logging.info(f'Entity Embeddings created')

-       logger.log_struct({"severity":"INFO","jsonPayload":json_obj})
+       logger.log_struct(json_obj, "INFO")
        return create_api_response('Success', message='All tasks completed successfully')

    except Exception as e:
@@ -298,7 +298,7 @@ async def chat_bot(uri=Form(),model=Form(None),userName=Form(), password=Form(),
        result["info"]["response_time"] = round(total_call_time, 2)

        json_obj = {'api_name':'chat_bot','db_url':uri,'session_id':session_id, 'logging_time': formatted_time(datetime.now(timezone.utc))}
-       logger.log_struct({"severity":"INFO","jsonPayload":json_obj})
+       logger.log_struct(json_obj, "INFO")

        return create_api_response('Success',data=result)
    except Exception as e:
@@ -316,7 +316,7 @@ async def chunk_entities(uri=Form(),userName=Form(), password=Form(), chunk_ids=
        logging.info(f"URI: {uri}, Username: {userName}, chunk_ids: {chunk_ids}")
        result = await asyncio.to_thread(get_entities_from_chunkids,uri=uri, username=userName, password=password, chunk_ids=chunk_ids)
        json_obj = {'api_name':'chunk_entities','db_url':uri, 'logging_time': formatted_time(datetime.now(timezone.utc))}
-       logger.log_struct({"severity":"INFO","jsonPayload":json_obj})
+       logger.log_struct(json_obj, "INFO")
        return create_api_response('Success',data=result)
    except Exception as e:
        job_status = "Failed"
@@ -344,7 +344,7 @@ async def graph_query(
            document_names=document_names
        )
        json_obj = {'api_name':'graph_query','db_url':uri,'document_names':document_names, 'logging_time': formatted_time(datetime.now(timezone.utc))}
-       logger.log_struct({"severity":"INFO","jsonPayload":json_obj})
+       logger.log_struct(json_obj, "INFO")
        return create_api_response('Success', data=result)
    except Exception as e:
        job_status = "Failed"
@@ -377,7 +377,7 @@ async def connect(uri=Form(), userName=Form(), password=Form(), database=Form())
        graph = create_graph_database_connection(uri, userName, password, database)
        result = await asyncio.to_thread(connection_check_and_get_vector_dimensions, graph)
        json_obj = {'api_name':'connect','db_url':uri,'status':result, 'count':1, 'logging_time': formatted_time(datetime.now(timezone.utc))}
-       logger.log_struct({"severity":"INFO","jsonPayload":json_obj})
+       logger.log_struct(json_obj, "INFO")
        return create_api_response('Success',data=result)
    except Exception as e:
        job_status = "Failed"
@@ -394,7 +394,7 @@ async def upload_large_file_into_chunks(file:UploadFile = File(...), chunkNumber
        graph = create_graph_database_connection(uri, userName, password, database)
        result = await asyncio.to_thread(upload_file, graph, model, file, chunkNumber, totalChunks, originalname, uri, CHUNK_DIR, MERGED_DIR)
        json_obj = {'api_name':'upload','db_url':uri, 'logging_time': formatted_time(datetime.now(timezone.utc))}
-       logger.log_struct({"severity":"INFO","jsonPayload":json_obj})
+       logger.log_struct(json_obj, "INFO")
        if int(chunkNumber) == int(totalChunks):
            return create_api_response('Success',data=result, message='Source Node Created Successfully')
        else:
@@ -416,7 +416,7 @@ async def get_structured_schema(uri=Form(), userName=Form(), password=Form(), da
        result = await asyncio.to_thread(get_labels_and_relationtypes, graph)
        logging.info(f'Schema result from DB: {result}')
        json_obj = {'api_name':'schema','db_url':uri, 'logging_time': formatted_time(datetime.now(timezone.utc))}
-       logger.log_struct({"severity":"INFO","jsonPayload":json_obj})
+       logger.log_struct(json_obj, "INFO")
        return create_api_response('Success', data=result)
    except Exception as e:
        message="Unable to get the labels and relationtypes from neo4j database"
@@ -485,7 +485,7 @@ async def delete_document_and_entities(uri=Form(),
        # entities_count = result[0]['deletedEntities'] if 'deletedEntities' in result[0] else 0
        message = f"Deleted {files_list_size} documents with entities from database"
        json_obj = {'api_name':'delete_document_and_entities','db_url':uri, 'logging_time': formatted_time(datetime.now(timezone.utc))}
-       logger.log_struct({"severity":"INFO","jsonPayload":json_obj})
+       logger.log_struct(json_obj, "INFO")
        return create_api_response('Success',message=message)
    except Exception as e:
        job_status = "Failed"

backend/src/logger.py

+3 −3
@@ -11,8 +11,8 @@ def __init__(self):
        else:
            self.logger = None

-   def log_struct(self, message):
+   def log_struct(self, message, severity="DEFAULT"):
        if self.is_gcp_log_enabled and message is not None:
-           self.logger.log_struct(message)
+           self.logger.log_struct({"message": message, "severity": severity})
        else:
-           print(f"[{severity}]{message}")
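The method above sits in a small wrapper around the google-cloud-logging client; the new severity parameter defaults to "DEFAULT", so any caller that still passes only a payload keeps working. Below is a sketch of how such a wrapper plausibly fits together. The class name, environment variable, and logger name are assumptions; only the log_struct body mirrors the committed code.

import os
from google.cloud import logging as gclogger

class CustomLogger:
    # illustrative wrapper; names outside log_struct are assumptions
    def __init__(self):
        self.is_gcp_log_enabled = os.environ.get("GCP_LOG_METRICS_ENABLED", "False").lower() == "true"
        if self.is_gcp_log_enabled:
            self.logging_client = gclogger.Client()
            self.logger = self.logging_client.logger("llm_graph_builder_metrics")
        else:
            self.logger = None

    def log_struct(self, message, severity="DEFAULT"):
        if self.is_gcp_log_enabled and message is not None:
            # the severity travels as a field of the structured payload;
            # passing severity=severity to the client's log_struct would
            # instead set the LogEntry's own severity in Cloud Logging
            self.logger.log_struct({"message": message, "severity": severity})
        else:
            print(f"[{severity}]{message}")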
