@@ -12,7 +12,8 @@ def download_who_data():
     }
 
     print("1. Fetching clean percentage data from WHO API...")
-    api_response = requests.get(api_url, params=params)
+    # Timeout set to 5s for connection, 300s for data transfer
+    api_response = requests.get(api_url, params=params, timeout=(5, 300))
 
     if api_response.status_code != 200:
         print(f"Failed to fetch API data. HTTP {api_response.status_code}")
@@ -24,10 +25,14 @@ def download_who_data():
     # 2. Get ONLY the iso3 code from the master database
     print("2. Fetching country iso3 codes from WHO master database...")
     master_url = "https://extranet.who.int/tme/generateCSV.asp?ds=notifications"
+
+    # Fetch data first to ensure timeout is applied correctly
+    master_response = requests.get(master_url, timeout=(5, 300))
+    master_response.raise_for_status()
 
-    # We only pull the 'country' (for matching) and 'iso3' columns
+    # Load from the response text to avoid a second unprotected download call
     geo_columns = ['country', 'iso3']
-    master_df = pd.read_csv(master_url, usecols=geo_columns).drop_duplicates()
+    master_df = pd.read_csv(io.StringIO(master_response.text), usecols=geo_columns).drop_duplicates()
 
     # 3. Merge the two datasets together based on the country name
     print("3. Merging data and formatting...")
@@ -54,4 +59,10 @@ def download_who_data():
     print(f"Success! Data saved locally as '{filename}'")
 
 if __name__ == "__main__":
-    download_who_data()
+    try:
+        download_who_data()
+    except requests.exceptions.Timeout:
+        print("Error: The request timed out. The server may be slow or offline.")
+    except Exception as e:
+        print(f"An unexpected error occurred: {e}")
+
0 commit comments