@@ -236,7 +236,7 @@ def get_granules(params: dict, ShortName: str, SingleDay_flag: bool):
236236###================================================================================================================
237237
238238
239- def download_files_wrapper (urls , download_dir , version , n_workers , force_redownload ):
239+ def download_files_wrapper (urls , download_dir , version , n_workers , force_redownload , show_noredownload_msg ):
240240 """Wrapper for downloading functions"""
241241
242242 pass
@@ -247,11 +247,12 @@ def download_files_wrapper(urls, download_dir, version, n_workers, force_redownl
247247
248248 ### Helper subroutine to gracefully download single files and avoids re-downloading if file already exists.
249249 # To force redownload of the file, pass **True** to the boolean argument *force* (default **False**)\n,
250- def download_file (url : str , output_dir : str , version : str , force : bool = False ):
250+ def download_file (url : str , output_dir : str , version : str , force : bool = False , show_noredownload_msg : bool = True ):
251251 """url (str): the HTTPS url from which the file will download
252252 output_dir (str): the local path into which the file will download
253253 version (str): the version of ECCO to be downloaded
254254 force (bool): download even if the file exists locally already
255+ show_noredownload_msg (bool): if True (default), print a message for each file that already exists and is therefore not re-downloaded; if False, suppress these messages
255256 """
256257 if not isdir (output_dir ):
257258 raise Exception (f"Output directory doesnt exist! ({ output_dir } )" )
@@ -260,7 +261,8 @@ def download_file(url: str, output_dir: str, version: str, force: bool=False):
260261
261262 # if the file has already been downloaded, skip
262263 if isfile (target_file ) and force is False :
263- print (f'\n { basename (url )} already exists, and force=False, not re-downloading' )
264+ if show_noredownload_msg :
265+ print (f'\n { basename (url )} already exists, and force=False, not re-downloading' )
264266 return target_file ,0
265267 elif version == 'v4r5' :
266268 raise Exception ('Capability to download v4r5 files via HTTPS has not been added yet.\n ' \
@@ -281,15 +283,15 @@ def download_file(url: str, output_dir: str, version: str, force: bool=False):
281283
282284
283285 ### Helper subroutine to download all urls in the list `dls`
284- def download_files_concurrently (dls , download_dir , version , n_workers , force = False ):
286+ def download_files_concurrently (dls , download_dir , version , n_workers , force = False , show_noredownload_msg = True ):
285287 start_time = time .time ()
286288
287289 # use thread pool for concurrent downloads
288290 with ThreadPoolExecutor (max_workers = n_workers ) as executor :
289291
290292 # tqdm makes a cool progress bar
291293 results = list (tqdm (executor .map (download_file , dls , repeat (download_dir ),\
292- repeat (version ), repeat (force )),\
294+ repeat (version ), repeat (force ), repeat ( show_noredownload_msg ) ),\
293295 total = len (dls ), desc = 'DL Progress' ,\
294296 ascii = True , ncols = 75 , file = sys .stdout ))
295297
@@ -321,7 +323,7 @@ def download_files_concurrently(dls, download_dir, version, n_workers, force=Fal
321323 ### Method 1: Concurrent downloads
322324
323325 # Force redownload (or not) depending on value of force_redownload
324- downloaded_files = download_files_concurrently (urls , download_dir , version , n_workers , force_redownload )
326+ downloaded_files = download_files_concurrently (urls , download_dir , version , n_workers , force_redownload , show_noredownload_msg )
325327
326328 except :
327329 ### Method 2: Sequential Downloads
@@ -337,7 +339,7 @@ def download_files_concurrently(dls, download_dir, version, n_workers, force=Fal
337339 for u in urls :
338340 u_name = u .split ('/' )[- 1 ]
339341 print (f'downloading { u_name } ' )
340- result = download_file (url = u , output_dir = download_dir , force = force_redownload )
342+ result = download_file (url = u , output_dir = download_dir , force = force_redownload , show_noredownload_msg = show_noredownload_msg )
341343 downloaded_files .append (result [0 ])
342344 total_download_size_in_bytes += result [- 1 ]
343345
@@ -358,7 +360,8 @@ def download_files_concurrently(dls, download_dir, version, n_workers, force=Fal
358360
359361
360362def ecco_podaac_download (ShortName ,StartDate ,EndDate ,version ,snapshot_interval = 'monthly' ,download_root_dir = None ,\
361- n_workers = 6 ,force_redownload = False ,return_downloaded_files = False ):
363+ n_workers = 6 ,force_redownload = False ,show_noredownload_msg = True ,\
364+ return_downloaded_files = False ):
362365 """
363366
364367 This routine downloads ECCO datasets from PO.DAAC. It is adapted from the Jupyter notebooks
@@ -391,6 +394,11 @@ def ecco_podaac_download(ShortName,StartDate,EndDate,version,snapshot_interval='
391394
392395 force_redownload: bool, if True, existing files will be redownloaded and replaced;
393396 if False (default), existing files will not be replaced.
397+
398+ show_noredownload_msg: bool, if True (default), and force_redownload=False,
399+ display message for each file that is already
400+ downloaded (and therefore not re-downloaded);
401+ if False, these messages are not shown.
394402
395403 return_downloaded_files: bool, if True, string or list of downloaded file(s) (including files that were already on disk
396404 and not replaced) is returned.
@@ -424,7 +432,7 @@ def ecco_podaac_download(ShortName,StartDate,EndDate,version,snapshot_interval='
424432
425433 # Download the granules
426434
427- downloaded_files = download_files_wrapper (urls , download_dir , n_workers , force_redownload )
435 downloaded_files = download_files_wrapper (urls , download_dir , version , n_workers , force_redownload , show_noredownload_msg )
428436
429437 if return_downloaded_files :
430438 if len (downloaded_files ) == 1 :
@@ -438,7 +446,7 @@ def ecco_podaac_download(ShortName,StartDate,EndDate,version,snapshot_interval='
438446
439447
440448def ecco_podaac_download_diskaware (ShortNames ,StartDate ,EndDate ,version ,snapshot_interval = None ,\
441- download_root_dir = None ,max_avail_frac = 0.5 ,n_workers = 6 ,force_redownload = False ):
449+ download_root_dir = None ,max_avail_frac = 0.5 ,n_workers = 6 ,force_redownload = False , show_noredownload_msg = True ):
442450
443451 """
444452
@@ -484,6 +492,11 @@ def ecco_podaac_download_diskaware(ShortNames,StartDate,EndDate,version,snapshot
484492 if False, existing files will not be replaced.
485493 Applies only if files are downloaded.
486494
495+ show_noredownload_msg: bool, if True (default), and force_redownload=False,
496+ display message for each file that is already
497+ downloaded (and therefore not re-downloaded);
498+ if False, these messages are not shown.
499+
487500
488501 Returns
489502 -------
@@ -564,7 +577,7 @@ def ecco_podaac_download_diskaware(ShortNames,StartDate,EndDate,version,snapshot
564577 download_dir = Path (download_root_dir ) / curr_shortname
565578
566579 # download files
567- curr_downloaded_files = download_files_wrapper (urls_list , download_dir , version , n_workers , force_redownload )
580+ curr_downloaded_files = download_files_wrapper (urls_list , download_dir , version , n_workers , force_redownload , show_noredownload_msg )
568581
569582 if len (curr_downloaded_files ) == 1 :
570583 # if only 1 file is downloaded, return a string of filename instead of a list
@@ -586,7 +599,7 @@ def ecco_podaac_download_diskaware(ShortNames,StartDate,EndDate,version,snapshot
586599
587600
588601def ecco_podaac_download_subset (ShortName ,StartDate = None ,EndDate = None ,snapshot_interval = None ,\
589- n_workers = 4 ,force_redownload = False ,\
602+ n_workers = 4 ,force_redownload = False ,show_noredownload_msg = True , \
590603 vars_to_include = 'all' ,vars_to_omit = None ,\
591604 times_to_include = 'all' ,\
592605 k_isel = [0 ,50 ,1 ],tile_isel = [0 ,13 ,1 ],j_isel = [0 ,90 ,1 ],i_isel = [0 ,90 ,1 ],\
@@ -630,6 +643,11 @@ def ecco_podaac_download_subset(ShortName,StartDate=None,EndDate=None,snapshot_i
630643 force_redownload: bool, if True, existing files will be redownloaded and replaced;
631644 if False, existing files will not be replaced.
632645
646+ show_noredownload_msg: bool, if True (default), and force_redownload=False,
647+ display message for each file that is already
648+ downloaded (and therefore not re-downloaded);
649+ if False, these messages are not shown.
650+
633651 vars_to_include: list or tuple, names of data variables to include in the downloaded files.
634652 Dimension and coordinate variables are automatically included,
635653 except for the lat/lon coordinate variables when include_latlon_coords=False.
@@ -983,7 +1001,7 @@ def download_file(url: str, output_file: str, force: bool=False):
9831001
9841002
9851003 def download_wrapper (url : str , url_append : str , download_dir : str , subset_file_id : str ,\
986- force : bool = False ):
1004+ force : bool = False , show_noredownload_msg : bool = True ):
9871005 import os .path
9881006
9891007 head , tail = os .path .split (url )
@@ -1002,7 +1020,8 @@ def download_wrapper(url: str, url_append: str, download_dir: str, subset_file_i
10021020 downloaded_files = []
10031021 total_download_size_in_bytes = 0
10041022 try :
1005- result = download_file (url = url , output_file = ncout , force = force )
1023+ result = download_file (url = url , output_file = ncout , force = force ,\
1024+ show_noredownload_msg = show_noredownload_msg )
10061025 downloaded_files .append (result [0 ])
10071026 total_download_size_in_bytes += result [1 ]
10081027 status_code = 0
@@ -1013,7 +1032,8 @@ def download_wrapper(url: str, url_append: str, download_dir: str, subset_file_i
10131032 while n_retry <= max_retries :
10141033 time .sleep (5 * (n_retry ** 2 ))
10151034 try :
1016- result = download_file (url = url , output_file = ncout , force = force )
1035+ result = download_file (url = url , output_file = ncout , force = force ,\
1036+ show_noredownload_msg = show_noredownload_msg )
10171037 downloaded_files .append (result [0 ])
10181038 total_download_size_in_bytes += result [1 ]
10191039 status_code = 0
@@ -1214,7 +1234,7 @@ def download_wrapper(url: str, url_append: str, download_dir: str, subset_file_i
12141234 with ThreadPoolExecutor (max_workers = n_workers ) as executor :
12151235 results = list (tqdm (executor .map (download_wrapper , grans_urls , repeat (url_append ),\
12161236 repeat (download_dir ), repeat (subset_file_id ),\
1217- repeat (force_redownload )),\
1237+ repeat (force_redownload ), repeat ( show_noredownload_msg ) ),\
12181238 total = len (grans_urls ), desc = 'DL Progress' ,\
12191239 ascii = True , ncols = 75 , file = sys .stdout ))
12201240 downloaded_files = []
@@ -1232,7 +1252,7 @@ def download_wrapper(url: str, url_append: str, download_dir: str, subset_file_i
12321252 status_codes = np .array ([]).astype ('int32' )
12331253 for url in grans_urls :
12341254 downloaded_file ,download_size ,status_code \
1235- = download_wrapper (url ,url_append ,download_dir ,subset_file_id ,force_redownload )
1255+ = download_wrapper (url ,url_append ,download_dir ,subset_file_id ,force_redownload , show_noredownload_msg )
12361256 downloaded_files .append (downloaded_file [0 ])
12371257 total_download_size_in_bytes += download_size
12381258 status_codes = np .append (status_codes ,status_code )
0 commit comments