
cloud tier: add retry when copying data file

fix https://github.com/seaweedfs/seaweedfs/issues/3828
chrislu · 2 years ago · commit e55076c46f · branch pull/3834/head
1 changed file with 4 additions:

weed/storage/backend/s3_backend/s3_backend.go (+4)
@@ -2,6 +2,7 @@ package s3_backend
 import (
 	"fmt"
+	"github.com/seaweedfs/seaweedfs/weed/util"
 	"io"
 	"os"
 	"strings"
@@ -91,7 +92,10 @@ func (s *S3BackendStorage) CopyFile(f *os.File, fn func(progressed int64, percen
 	glog.V(1).Infof("copying dat file of %s to remote s3.%s as %s", f.Name(), s.id, key)
+	util.Retry("upload to S3", func() error {
 	size, err = uploadToS3(s.conn, f.Name(), s.bucket, key, s.storageClass, fn)
+	return err
+	})
 	return
 }
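For context, util.Retry wraps a job and re-runs it when it fails, so a transient S3 error during a tier move no longer aborts the copy on the first attempt. Below is a minimal, self-contained sketch of that retry-with-backoff pattern; the attempt count, wait times, and logging are illustrative assumptions, not the actual weed/util implementation.

// retrysketch.go — a sketch of a retry helper in the spirit of util.Retry.
// The attempt limit, backoff growth, and log output are assumptions for
// illustration, not the real SeaweedFS code.
package main

import (
	"errors"
	"fmt"
	"time"
)

// Retry runs job, sleeping with a growing backoff between failed attempts,
// and returns the last error once the attempts are exhausted.
func Retry(name string, job func() error) (err error) {
	wait := time.Second
	for attempt := 1; attempt <= 3; attempt++ {
		if err = job(); err == nil {
			return nil
		}
		fmt.Printf("retry %s: attempt %d failed: %v\n", name, attempt, err)
		time.Sleep(wait)
		wait += wait / 2 // grow the backoff between attempts
	}
	return err
}

func main() {
	calls := 0
	// Example: a job that fails twice before succeeding, mimicking a
	// transient network error during an upload.
	err := Retry("upload to S3", func() error {
		calls++
		if calls < 3 {
			return errors.New("transient network error")
		}
		return nil
	})
	fmt.Println("final error:", err) // final error: <nil>
}

With CopyFile's upload wrapped this way, size and err are captured from the most recent attempt, and a failure only surfaces after the retries are exhausted.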
