From e55076c46f4d4f4b08d940ac0a972f445776e029 Mon Sep 17 00:00:00 2001
From: chrislu
Date: Wed, 12 Oct 2022 00:38:32 -0700
Subject: [PATCH] cloud tier: add retry when copying data file

fix https://github.com/seaweedfs/seaweedfs/issues/3828
---
 weed/storage/backend/s3_backend/s3_backend.go | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/weed/storage/backend/s3_backend/s3_backend.go b/weed/storage/backend/s3_backend/s3_backend.go
index 0b3db3c67..73b33716c 100644
--- a/weed/storage/backend/s3_backend/s3_backend.go
+++ b/weed/storage/backend/s3_backend/s3_backend.go
@@ -2,6 +2,7 @@ package s3_backend
 
 import (
 	"fmt"
+	"github.com/seaweedfs/seaweedfs/weed/util"
 	"io"
 	"os"
 	"strings"
@@ -91,7 +92,10 @@ func (s *S3BackendStorage) CopyFile(f *os.File, fn func(progressed int64, percen
 
 	glog.V(1).Infof("copying dat file of %s to remote s3.%s as %s", f.Name(), s.id, key)
 
-	size, err = uploadToS3(s.conn, f.Name(), s.bucket, key, s.storageClass, fn)
+	util.Retry("upload to S3", func() error {
+		size, err = uploadToS3(s.conn, f.Name(), s.bucket, key, s.storageClass, fn)
+		return err
+	})
 
 	return
 }
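For context, the patch wraps the one-shot `uploadToS3` call in `util.Retry`, so a transient S3 failure no longer aborts the tiering copy on the first attempt. The sketch below is an illustrative stand-in, not seaweedfs's actual `weed/util` implementation: it only assumes the signature the patch calls, `Retry(name string, job func() error) error`, and the attempt cap and backoff values are made-up placeholders.

```go
// Illustrative sketch of a retry helper with the shape util.Retry has
// in the patch above. The 10s cap and the 1.5x linear-ish backoff are
// assumptions for the example, not seaweedfs's actual tuning.
package main

import (
	"errors"
	"fmt"
	"time"
)

// Retry runs job, re-running it with a growing pause while it keeps
// failing. It returns nil on the first success, otherwise the error
// from the final attempt.
func Retry(name string, job func() error) (err error) {
	waitTime := time.Second
	for waitTime < 10*time.Second { // assumed cap on total backoff growth
		err = job()
		if err == nil {
			return nil
		}
		fmt.Printf("%s failed: %v, retrying in %v\n", name, err, waitTime)
		time.Sleep(waitTime)
		waitTime += waitTime / 2
	}
	return err
}

func main() {
	// Usage example: the job fails twice, then succeeds on attempt 3.
	attempts := 0
	err := Retry("upload to S3", func() error {
		attempts++
		if attempts < 3 {
			return errors.New("transient network error")
		}
		return nil
	})
	fmt.Println("attempts:", attempts, "err:", err)
}
```

One design point worth noting in the patch itself: the return value of `util.Retry` is discarded, but that is safe here because `size` and `err` are named return values of `CopyFile` and are assigned inside the closure, so the result of the final attempt still propagates to the caller.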