@@ -791,6 +791,7 @@ def extract_item(
791791 stripped_components = 0 ,
792792 original_path = None ,
793793 pi = None ,
794+ skip_integrity_errors = False ,
794795 ):
795796 """
796797 Extract archive item.
@@ -804,6 +805,8 @@ def extract_item(
804805 :param stripped_components: stripped leading path components to correct hard link extraction
805806 :param original_path: 'path' key as stored in archive
806807 :param pi: ProgressIndicatorPercent (or similar) for file extraction progress (in bytes)
808+ :param skip_integrity_errors: skip over corrupted chunks instead of raising IntegrityError
809+ (ignored for dry_run and stdout)
807810 """
808811 has_damaged_chunks = "chunks_healthy" in item
809812 if dry_run or stdout :
@@ -832,7 +835,7 @@ def extract_item(
832835 )
833836 if has_damaged_chunks :
834837 raise BackupError ("File has damaged (all-zero) chunks. Try running borg check --repair." )
835- return
838+ return True
836839
837840 original_path = original_path or item .path
838841 dest = self .cwd
@@ -867,15 +870,38 @@ def make_parent(path):
867870 fd = open (path , "wb" )
868871 with fd :
869872 ids = [c .id for c in item .chunks ]
870- for data in self .pipeline .fetch_many (ids , is_preloaded = True ):
873+ chunk_index = - 1
874+ chunk_iterator = self .pipeline .fetch_many (ids , is_preloaded = True )
875+ skipped_errors = False
876+ while True :
877+ try :
878+ chunk_index += 1
879+ data = next (chunk_iterator )
880+ except StopIteration :
881+ break
882+ except IntegrityError as err :
883+ if not skip_integrity_errors :
884+ raise
885+ c = item .chunks [chunk_index ]
886+ size = c .size
887+ logger .warning ("%s: chunk %s: %s" , remove_surrogates (item .path ), bin_to_hex (c .id ), err )
888+ with backup_io ("seek" ):
889+ fd .seek (size , 1 )
890+ skipped_errors = True
891+ # restart chunk data generator
892+ ids = [c .id for c in item .chunks [chunk_index + 1 :]]
893+ chunk_iterator = self .pipeline .fetch_many (ids , is_preloaded = True )
894+ else :
895+ with backup_io ("write" ):
896+ size = len (data )
897+ if sparse and zeros .startswith (data ):
898+ # all-zero chunk: create a hole in a sparse file
899+ fd .seek (size , 1 )
900+ else :
901+ fd .write (data )
871902 if pi :
872- pi .show (increase = len (data ), info = [remove_surrogates (item .path )])
873- with backup_io ("write" ):
874- if sparse and zeros .startswith (data ):
875- # all-zero chunk: create a hole in a sparse file
876- fd .seek (len (data ), 1 )
877- else :
878- fd .write (data )
903+ pi .show (increase = size , info = [remove_surrogates (item .path )])
904+
879905 with backup_io ("truncate_and_attrs" ):
880906 pos = item_chunks_size = fd .tell ()
881907 fd .truncate (pos )
@@ -889,7 +915,7 @@ def make_parent(path):
889915 )
890916 if has_damaged_chunks :
891917 raise BackupError ("File has damaged (all-zero) chunks. Try running borg check --repair." )
892- return
918+ return not skipped_errors
893919 with backup_io :
894920 # No repository access beyond this point.
895921 if stat .S_ISDIR (mode ):
@@ -914,18 +940,19 @@ def make_parent(path):
914940 make_parent (path )
915941 with self .extract_helper (item , path , hlm ) as hardlink_set :
916942 if hardlink_set :
917- return
943+ return True
918944 os .mkfifo (path )
919945 self .restore_attrs (path , item )
920946 elif stat .S_ISCHR (mode ) or stat .S_ISBLK (mode ):
921947 make_parent (path )
922948 with self .extract_helper (item , path , hlm ) as hardlink_set :
923949 if hardlink_set :
924- return
950+ return True
925951 os .mknod (path , item .mode , item .rdev )
926952 self .restore_attrs (path , item )
927953 else :
928954 raise Exception ("Unknown archive item type %r" % item .mode )
955+ return True
929956
930957 def restore_attrs (self , path , item , symlink = False , fd = None ):
931958 """