@@ -118,12 +118,13 @@ def load_references_data(self, endpoint, references_structure):
         data_files = self.lightbeam.get_data_files_for_endpoint(endpoint)
         for file_name in data_files:
             with open(file_name) as file:
-                for line_counter, line in enumerate(file):
+                for i, line in enumerate(file):
+                    line_number = i + 1
                     line = line.strip()
                     try:
                         payload = json.loads(line)
                     except Exception as e:
-                        self.logger.warning(f"... (ignoring invalid JSON payload at {line_counter} of {file_name})")
+                        self.logger.warning(f"... (ignoring invalid JSON payload at {line_number} of {file_name})")
                     ref_payload = {}
                     for key in references_structure[endpoint]:
                         key = self.EDFI_GENERIC_REFS_TO_PROPERTIES_MAPPING.get(key, {}).get(endpoint, key)
@@ -199,12 +200,13 @@ async def validate_endpoint(self, endpoint):
         for file_name in data_files:
             self.logger.info(f"validating {file_name} against {definition} schema...")
             with open(file_name) as file:
-                for line_counter, line in enumerate(file):
+                for i, line in enumerate(file):
+                    line_number = i + 1
                     total_counter += 1
                     data = line.strip()
 
                     tasks.append(asyncio.create_task(
-                        self.do_validate_payload(endpoint, file_name, data, line_counter)))
+                        self.do_validate_payload(endpoint, file_name, data, line_number)))
 
                     if len(tasks) >= self.MAX_VALIDATE_TASK_QUEUE_SIZE:
                         await self.lightbeam.do_tasks(tasks, total_counter, log_status_counts=False)
@@ -235,10 +237,10 @@ async def validate_endpoint(self, endpoint):
         num_others = self.lightbeam.num_errors - self.MAX_VALIDATION_ERRORS_TO_DISPLAY
         if self.lightbeam.num_errors > self.MAX_VALIDATION_ERRORS_TO_DISPLAY:
             self.logger.warn(f"... and {num_others} others!")
-        self.logger.warn(f"... VALIDATION ERRORS on {self.lightbeam.num_errors} of {line_counter} lines in {file_name}; see details above.")
+        self.logger.warn(f"... VALIDATION ERRORS on {self.lightbeam.num_errors} of {line_number} lines in {file_name}; see details above.")
 
 
-    async def do_validate_payload(self, endpoint, file_name, data, line_counter):
+    async def do_validate_payload(self, endpoint, file_name, data, line_number):
         if self.fail_fast_threshold is not None and self.lightbeam.num_errors >= self.fail_fast_threshold: return
         definition = self.get_swagger_definition_for_endpoint(endpoint)
         if "Descriptor" in endpoint:
@@ -262,7 +264,7 @@ async def do_validate_payload(self, endpoint, file_name, data, line_counter):
         try:
             payload = json.loads(data)
         except Exception as e:
-            self.log_validation_error(endpoint, file_name, line_counter, "json", f"invalid JSON {str(e).replace(' line 1','')}")
+            self.log_validation_error(endpoint, file_name, line_number, "json", f"invalid JSON {str(e).replace(' line 1','')}")
             return
 
         # check payload obeys Swagger schema
@@ -273,22 +275,22 @@ async def do_validate_payload(self, endpoint, file_name, data, line_counter):
             e_path = [str(x) for x in list(e.path)]
             context = ""
             if len(e_path)>0: context = " in " + " -> ".join(e_path)
-            self.log_validation_error(endpoint, file_name, line_counter, "schema", f"{str(e.message)} {context}")
+            self.log_validation_error(endpoint, file_name, line_number, "schema", f"{str(e.message)} {context}")
             return
 
         # check descriptor values are valid
         if "descriptors" in self.validation_methods:
             error_message = self.has_invalid_descriptor_values(payload, path="")
             if error_message != "":
-                self.log_validation_error(endpoint, file_name, line_counter, "descriptors", error_message)
+                self.log_validation_error(endpoint, file_name, line_number, "descriptors", error_message)
                 return
 
         # check natural keys are unique
         if "uniqueness" in self.validation_methods:
             params = json.dumps(util.interpolate_params(identity_params_structure, payload))
             params_hash = hashlog.get_hash(params)
             if params_hash in distinct_params:
-                self.log_validation_error(endpoint, file_name, line_counter, "uniqueness", "duplicate value(s) for natural key(s): {params}")
+                self.log_validation_error(endpoint, file_name, line_number, "uniqueness", "duplicate value(s) for natural key(s): {params}")
                 return
             else: distinct_params.append(params_hash)
@@ -297,7 +299,7 @@ async def do_validate_payload(self, endpoint, file_name, data, line_counter):
             self.lightbeam.api.do_oauth()
             error_message = self.has_invalid_references(payload, path="")
             if error_message != "":
-                self.log_validation_error(endpoint, file_name, line_counter, "references", error_message)
+                self.log_validation_error(endpoint, file_name, line_number, "references", error_message)
 
 
     def log_validation_error(self, endpoint, file_name, line_number, method, message):