
cloud tier: add retry when copying data file

fix https://github.com/seaweedfs/seaweedfs/issues/3828
pull/3834/head
chrislu 2 years ago
commit e55076c46f
1 changed file with 5 additions and 1 deletion:
weed/storage/backend/s3_backend/s3_backend.go
@@ -2,6 +2,7 @@ package s3_backend
 import (
 	"fmt"
+	"github.com/seaweedfs/seaweedfs/weed/util"
 	"io"
 	"os"
 	"strings"
@@ -91,7 +92,10 @@ func (s *S3BackendStorage) CopyFile(f *os.File, fn func(progressed int64, percen
 	glog.V(1).Infof("copying dat file of %s to remote s3.%s as %s", f.Name(), s.id, key)
-	size, err = uploadToS3(s.conn, f.Name(), s.bucket, key, s.storageClass, fn)
+	util.Retry("upload to S3", func() error {
+		size, err = uploadToS3(s.conn, f.Name(), s.bucket, key, s.storageClass, fn)
+		return err
+	})
 	return
}
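
The fix wraps the single uploadToS3 call in util.Retry from weed/util. Note that Retry's own return value is discarded here; because size and err are CopyFile's named return values and the closure assigns them, CopyFile still returns the outcome of the last attempt. For context, below is a minimal sketch of a retry helper with this signature; the attempt count, backoff schedule, and logging are assumptions for illustration, not the actual weed/util implementation.

package util

import (
	"log"
	"time"
)

// Retry runs job and, on failure, retries it with exponential
// backoff between attempts, returning the last error seen.
// Sketch only: 3 attempts and a doubling wait are assumed here;
// the real weed/util helper may differ.
func Retry(name string, job func() error) (err error) {
	const attempts = 3
	wait := time.Second
	for i := 0; i < attempts; i++ {
		if err = job(); err == nil {
			return nil
		}
		if i < attempts-1 {
			log.Printf("%s failed: %v; retrying in %v", name, err, wait)
			time.Sleep(wait)
			wait *= 2 // back off before the next attempt
		}
	}
	return err
}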
