@@ -115,7 +115,7 @@ async def create_source_knowledge_graph_url(
         message = f"Source Node created successfully for source type: {source_type} and source: {source}"
         json_obj = {'api_name':'url_scan','db_url':uri,'url_scanned_file':lst_file_name, 'source_url':source_url, 'wiki_query':wiki_query, 'logging_time': formatted_time(datetime.now(timezone.utc))}
-        logger.log_struct(json_obj)
+        logger.log_struct(json_obj, "INFO")
         return create_api_response("Success",message=message,success_count=success_count,failed_count=failed_count,file_name=lst_file_name)
     except Exception as e:
         error_message = str(e)
@@ -203,7 +203,7 @@ async def extract_knowledge_graph_from_file(
         result['wiki_query'] = wiki_query
         result['source_type'] = source_type
         result['logging_time'] = formatted_time(datetime.now(timezone.utc))
-        logger.log_struct({"severity": "INFO", "jsonPayload": result})
+        logger.log_struct(result, "INFO")
         extract_api_time = time.time() - start_time
         logging.info(f"extraction completed in {extract_api_time:.2f} seconds for file name {file_name}")
         return create_api_response('Success', data=result, file_source=source_type)
@@ -222,7 +222,7 @@ async def extract_knowledge_graph_from_file(
                 logging.info(f'Deleted File Path: {merged_file_path} and Deleted File Name : {file_name}')
                 delete_uploaded_local_file(merged_file_path,file_name)
         json_obj = {'message':message,'error_message':error_message, 'file_name': file_name,'status':'Failed','db_url':uri,'failed_count':1, 'source_type': source_type, 'source_url':source_url, 'wiki_query':wiki_query, 'logging_time': formatted_time(datetime.now(timezone.utc))}
-        logger.log_struct({"severity": "ERROR", "jsonPayload": json_obj})
+        logger.log_struct(json_obj, "ERROR")
         logging.exception(f'File Failed in extraction: {json_obj}')
         return create_api_response('Failed', message=message + error_message[:100], error=error_message, file_name=file_name)
     finally:
@@ -239,7 +239,7 @@ async def get_source_list(uri:str, userName:str, password:str, database:str=None
         uri = uri.replace(" ","+")
         result = await asyncio.to_thread(get_source_list_from_graph,uri,userName,decoded_password,database)
         json_obj = {'api_name':'sources_list','db_url':uri, 'logging_time': formatted_time(datetime.now(timezone.utc))}
-        logger.log_struct({"severity": "INFO", "jsonPayload": json_obj})
+        logger.log_struct(json_obj, "INFO")
         return create_api_response("Success",data=result)
     except Exception as e:
         job_status = "Failed"
@@ -269,7 +269,7 @@ async def post_processing(uri=Form(), userName=Form(), password=Form(), database
             json_obj = {'api_name': 'post_processing/create_entity_embedding', 'db_url': uri, 'logging_time': formatted_time(datetime.now(timezone.utc))}
             logging.info(f'Entity Embeddings created')
-            logger.log_struct({"severity": "INFO", "jsonPayload": json_obj})
+            logger.log_struct(json_obj, "INFO")
         return create_api_response('Success', message='All tasks completed successfully')
 
     except Exception as e:
@@ -298,7 +298,7 @@ async def chat_bot(uri=Form(),model=Form(None),userName=Form(), password=Form(),
         result["info"]["response_time"] = round(total_call_time, 2)
 
         json_obj = {'api_name':'chat_bot','db_url':uri,'session_id':session_id, 'logging_time': formatted_time(datetime.now(timezone.utc))}
-        logger.log_struct({"severity": "INFO", "jsonPayload": json_obj})
+        logger.log_struct(json_obj, "INFO")
 
         return create_api_response('Success',data=result)
     except Exception as e:
@@ -316,7 +316,7 @@ async def chunk_entities(uri=Form(),userName=Form(), password=Form(), chunk_ids=
         logging.info(f"URI: {uri}, Username: {userName}, chunk_ids: {chunk_ids}")
         result = await asyncio.to_thread(get_entities_from_chunkids,uri=uri, username=userName, password=password, chunk_ids=chunk_ids)
         json_obj = {'api_name':'chunk_entities','db_url':uri, 'logging_time': formatted_time(datetime.now(timezone.utc))}
-        logger.log_struct({"severity": "INFO", "jsonPayload": json_obj})
+        logger.log_struct(json_obj, "INFO")
         return create_api_response('Success',data=result)
     except Exception as e:
         job_status = "Failed"
@@ -344,7 +344,7 @@ async def graph_query(
             document_names=document_names
         )
         json_obj = {'api_name':'graph_query','db_url':uri,'document_names':document_names, 'logging_time': formatted_time(datetime.now(timezone.utc))}
-        logger.log_struct({"severity": "INFO", "jsonPayload": json_obj})
+        logger.log_struct(json_obj, "INFO")
         return create_api_response('Success', data=result)
     except Exception as e:
         job_status = "Failed"
@@ -377,7 +377,7 @@ async def connect(uri=Form(), userName=Form(), password=Form(), database=Form())
         graph = create_graph_database_connection(uri, userName, password, database)
         result = await asyncio.to_thread(connection_check_and_get_vector_dimensions, graph)
         json_obj = {'api_name':'connect','db_url':uri,'status':result, 'count':1, 'logging_time': formatted_time(datetime.now(timezone.utc))}
-        logger.log_struct({"severity": "INFO", "jsonPayload": json_obj})
+        logger.log_struct(json_obj, "INFO")
         return create_api_response('Success',data=result)
     except Exception as e:
         job_status = "Failed"
@@ -394,7 +394,7 @@ async def upload_large_file_into_chunks(file:UploadFile = File(...), chunkNumber
         graph = create_graph_database_connection(uri, userName, password, database)
         result = await asyncio.to_thread(upload_file, graph, model, file, chunkNumber, totalChunks, originalname, uri, CHUNK_DIR, MERGED_DIR)
         json_obj = {'api_name':'upload','db_url':uri, 'logging_time': formatted_time(datetime.now(timezone.utc))}
-        logger.log_struct({"severity": "INFO", "jsonPayload": json_obj})
+        logger.log_struct(json_obj, "INFO")
         if int(chunkNumber) == int(totalChunks):
             return create_api_response('Success',data=result, message='Source Node Created Successfully')
         else:
@@ -416,7 +416,7 @@ async def get_structured_schema(uri=Form(), userName=Form(), password=Form(), da
         result = await asyncio.to_thread(get_labels_and_relationtypes, graph)
         logging.info(f'Schema result from DB: {result}')
         json_obj = {'api_name':'schema','db_url':uri, 'logging_time': formatted_time(datetime.now(timezone.utc))}
-        logger.log_struct({"severity": "INFO", "jsonPayload": json_obj})
+        logger.log_struct(json_obj, "INFO")
         return create_api_response('Success', data=result)
     except Exception as e:
         message = "Unable to get the labels and relationtypes from neo4j database"
@@ -485,7 +485,7 @@ async def delete_document_and_entities(uri=Form(),
         # entities_count = result[0]['deletedEntities'] if 'deletedEntities' in result[0] else 0
         message = f"Deleted {files_list_size} documents with entities from database"
         json_obj = {'api_name':'delete_document_and_entities','db_url':uri, 'logging_time': formatted_time(datetime.now(timezone.utc))}
-        logger.log_struct({"severity": "INFO", "jsonPayload": json_obj})
+        logger.log_struct(json_obj, "INFO")
         return create_api_response('Success',message=message)
     except Exception as e:
         job_status = "Failed"
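The change repeated across these hunks swaps the old one-argument and dict-wrapping calls for a two-argument form, logger.log_struct(payload, severity). That only works if the module-level logger is the project's own wrapper rather than a google.cloud.logging Logger used directly, with severity as its second positional parameter. Below is a minimal sketch of such a wrapper, assuming google-cloud-logging; the environment-variable name, log name, and fallback behavior are illustrative assumptions, not necessarily what this repo ships.

import os
from google.cloud import logging as gclogger

class CustomLogger:
    """Hypothetical wrapper matching calls like logger.log_struct(json_obj, "INFO")."""

    def __init__(self):
        # Assumed toggle: only send structured logs to Cloud Logging when enabled.
        self.gcp_enabled = os.environ.get("GCP_LOG_METRICS_ENABLED", "False").lower() == "true"
        if self.gcp_enabled:
            client = gclogger.Client()
            self.gcp_logger = client.logger("llm_experiments_metrics")  # assumed log name

    def log_struct(self, message, severity="DEFAULT"):
        # Severity is the second positional argument, as in the diff above;
        # it is forwarded to Cloud Logging as entry metadata.
        if self.gcp_enabled and message is not None:
            self.gcp_logger.log_struct({"message": message}, severity=severity)
        else:
            print(f"[{severity}] {message}")

logger = CustomLogger()

With this shape, the endpoints no longer build {"severity": ..., "jsonPayload": ...} envelopes by hand, and the severity becomes Cloud Logging entry metadata (filterable in the console) instead of a field buried inside the payload.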