@@ -118,12 +118,13 @@ def load_references_data(self, endpoint, references_structure):
118118 data_files = self .lightbeam .get_data_files_for_endpoint (endpoint )
119119 for file_name in data_files :
120120 with open (file_name ) as file :
121- for line_counter , line in enumerate (file ):
121+ for i , line in enumerate (file ):
122+ line_number = i + 1
122123 line = line .strip ()
123124 try :
124125 payload = json .loads (line )
125126 except Exception as e :
126- self .logger .warning (f"... (ignoring invalid JSON payload at { line_counter } of { file_name } )" )
127+ self .logger .warning (f"... (ignoring invalid JSON payload at { line_number } of { file_name } )" )
127128 ref_payload = {}
128129 for key in references_structure [endpoint ]:
129130 key = self .EDFI_GENERIC_REFS_TO_PROPERTIES_MAPPING .get (key , {}).get (endpoint , key )
@@ -217,12 +218,13 @@ async def validate_endpoint(self, endpoint):
217218 for file_name in data_files :
218219 self .logger .info (f"validating { file_name } against { definition } schema..." )
219220 with open (file_name ) as file :
220- for line_counter , line in enumerate (file ):
221+ for i , line in enumerate (file ):
222+ line_number = i + 1
221223 total_counter += 1
222224 data = line .strip ()
223225
224226 tasks .append (asyncio .create_task (
225- self .do_validate_payload (endpoint , file_name , data , line_counter )))
227+ self .do_validate_payload (endpoint , file_name , data , line_number )))
226228
227229 if len (tasks ) >= self .MAX_VALIDATE_TASK_QUEUE_SIZE :
228230 await self .lightbeam .do_tasks (tasks , total_counter , log_status_counts = False )
@@ -243,6 +245,11 @@ async def validate_endpoint(self, endpoint):
243245
244246 if len (tasks )> 0 : await self .lightbeam .do_tasks (tasks , total_counter , log_status_counts = False )
245247
248+ # update metadata counts
249+ self .lightbeam .metadata ["resources" ][endpoint ]["records_processed" ] = total_counter
250+ self .lightbeam .metadata ["resources" ][endpoint ]["records_skipped" ] = self .lightbeam .num_skipped
251+ self .lightbeam .metadata ["resources" ][endpoint ]["records_failed" ] = self .lightbeam .num_errors
252+
246253 if self .lightbeam .num_errors == 0 : self .logger .info (f"... all lines validate ok!" )
247254 else :
248255 num_others = self .lightbeam .num_errors - self .MAX_VALIDATION_ERRORS_TO_DISPLAY
@@ -257,14 +264,14 @@ async def validate_endpoint(self, endpoint):
257264 self .schema_validator = None
258265
259266
260- async def do_validate_payload (self , endpoint , file_name , data , line_counter ):
267+ async def do_validate_payload (self , endpoint , file_name , data , line_number ):
261268 if self .fail_fast_threshold is not None and self .lightbeam .num_errors >= self .fail_fast_threshold : return
262269
263270 # check payload is valid JSON
264271 try :
265272 payload = json .loads (data )
266273 except Exception as e :
267- self .log_validation_error (endpoint , file_name , line_counter , "json" , f"invalid JSON { str (e ).replace (' line 1' ,'' )} " )
274+ self .log_validation_error (endpoint , file_name , line_number , "json" , f"invalid JSON { str (e ).replace (' line 1' ,'' )} " )
268275 return
269276
270277 # check payload obeys Swagger schema
@@ -275,14 +282,14 @@ async def do_validate_payload(self, endpoint, file_name, data, line_counter):
275282 e_path = [str (x ) for x in list (e .path )]
276283 context = ""
277284 if len (e_path )> 0 : context = " in " + " -> " .join (e_path )
278- self .log_validation_error (endpoint , file_name , line_counter , "schema" , f"{ str (e .message )} { context } " )
285+ self .log_validation_error (endpoint , file_name , line_number , "schema" , f"{ str (e .message )} { context } " )
279286 return
280287
281288 # check descriptor values are valid
282289 if "descriptors" in self .validation_methods :
283290 error_message = self .has_invalid_descriptor_values (payload , path = "" )
284291 if error_message != "" :
285- self .log_validation_error (endpoint , file_name , line_counter , "descriptors" , error_message )
292+ self .log_validation_error (endpoint , file_name , line_number , "descriptors" , error_message )
286293 return
287294
288295 # check natural keys are unique
@@ -298,7 +305,7 @@ async def do_validate_payload(self, endpoint, file_name, data, line_counter):
298305 self .lightbeam .api .do_oauth ()
299306 error_message = self .has_invalid_references (payload , path = "" )
300307 if error_message != "" :
301- self .log_validation_error (endpoint , file_name , line_counter , "references" , error_message )
308+ self .log_validation_error (endpoint , file_name , line_number , "references" , error_message )
302309
303310
304311 def log_validation_error (self , endpoint , file_name , line_number , method , message ):