avoid recursively deleting newly created empty directories (#4016)
pull/4018/head
Konstantin Lebedev
2 years ago
committed by GitHub
1 changed file with 8 additions and 1 deletion:

weed/s3api/s3api_objects_list_handlers.go
@@ -18,6 +18,8 @@ import (
 	"github.com/seaweedfs/seaweedfs/weed/s3api/s3err"
 )
 
+const cutoffTimeNewEmptyDir = 3
+
 type ListBucketResultV2 struct {
 	XMLName xml.Name `xml:"http://s3.amazonaws.com/doc/2006-03-01/ ListBucketResult"`
 	Name    string   `xml:"Name"`
@@ -407,11 +409,16 @@ func (s3a *S3ApiServer) ensureDirectoryAllEmpty(filerClient filer_pb.SeaweedFile
 	var startFrom string
 	var isExhausted bool
 	var foundEntry bool
+	cutOffTimeAtSec := time.Now().Unix() + cutoffTimeNewEmptyDir
 	for fileCounter == 0 && !isExhausted && err == nil {
 		err = filer_pb.SeaweedList(filerClient, currentDir, "", func(entry *filer_pb.Entry, isLast bool) error {
 			foundEntry = true
 			if entry.IsDirectory {
-				subDirs = append(subDirs, entry.Name)
+				if entry.Attributes != nil && cutOffTimeAtSec >= entry.Attributes.GetCrtime() {
+					fileCounter++
+				} else {
+					subDirs = append(subDirs, entry.Name)
+				}
 			} else {
 				fileCounter++
 			}
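For readers skimming the change, here is a minimal standalone sketch of the decision the patched callback now makes for each listed entry. The dirEntry struct and countsAsFile helper are hypothetical stand-ins invented for this illustration (the real code reads filer_pb.Entry inside ensureDirectoryAllEmpty); only the constant and the comparison against cutOffTimeAtSec come from the diff above.

package main

import (
	"fmt"
	"time"
)

// cutoffTimeNewEmptyDir mirrors the constant introduced by this commit (seconds).
const cutoffTimeNewEmptyDir = 3

// dirEntry is a hypothetical stand-in for the filer_pb.Entry fields the new
// check reads: the entry kind and its creation time in unix seconds.
type dirEntry struct {
	Name        string
	IsDirectory bool
	Crtime      int64
}

// countsAsFile mirrors the patched branch: plain files always count toward
// fileCounter, and a directory counts as well when its creation time is at or
// before cutOffTimeAtSec; otherwise the directory would be queued into subDirs
// for a recursive emptiness check.
func countsAsFile(e dirEntry, cutOffTimeAtSec int64) bool {
	if !e.IsDirectory {
		return true
	}
	return cutOffTimeAtSec >= e.Crtime
}

func main() {
	// Same cutoff as the diff: now plus cutoffTimeNewEmptyDir seconds.
	cutOffTimeAtSec := time.Now().Unix() + cutoffTimeNewEmptyDir

	entries := []dirEntry{
		{Name: "photo.jpg", IsDirectory: false, Crtime: time.Now().Unix() - 60},
		{Name: "uploads", IsDirectory: true, Crtime: time.Now().Unix()},
	}
	for _, e := range entries {
		fmt.Printf("%-10s counts toward fileCounter: %v\n", e.Name, countsAsFile(e, cutOffTimeAtSec))
	}
}

An entry that counts toward fileCounter keeps ensureDirectoryAllEmpty from reporting the parent as empty, which, per the commit title, appears to be what stops freshly created empty directories from being swept up by the recursive cleanup.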