@@ -608,14 +608,18 @@ def read_data(self, args):
608608 local_warnings = f"No station file is valid."
609609 return urls , station , local_errors , local_warnings
610610 except Exception as error :
611- local_errors = f'Opening files : { error } .'
611+ local_errors = f'Selecting station file : { error } .'
612612 return urls , station , local_errors , local_warnings
613613 else :
614614 url = urls [0 ]
615615
616- nc = Dataset (url , mode = 'r' )
617- ds = xr .open_dataset (xr .backends .NetCDF4DataStore (nc ))
618-
616+ try :
617+ nc = Dataset (url , mode = 'r' )
618+ ds = xr .open_dataset (xr .backends .NetCDF4DataStore (nc ))
619+ except Exception as error :
620+ local_errors = f'Opening file: { error } .'
621+ return urls , station , local_errors , local_warnings
622+
619623 possible_vars , possible_var = self .get_var_in_file (ds , var , actris_parameter , ebas_component )
620624 if possible_var is None :
621625 local_errors = f'No variable name matches for { possible_vars } . Existing keys: { list (ds .data_vars )} .'
@@ -952,8 +956,8 @@ def get_data(self, files, var, actris_parameter, target_start_date, target_end_d
952956 # wait for worker processes to terminate before continuing
953957 pool .join ()
954958
955- if ( len (errors ) + len ( warnings ) ) == len (args_list ):
956- self .download_instance .logger .info ('All datasets have thrown an error or warning , aborting.' )
959+ if len (errors ) == len (args_list ):
960+ self .download_instance .logger .info ('All datasets have thrown an error, aborting.' )
957961 return
958962
959963 # get combined data and metadata after read
0 commit comments