Browse Source

fix test_bucket_listv2_encoding_basic without sort

pull/5580/head
Konstantin Lebedev 8 months ago
parent
commit
4e70053263
  1. 27
      weed/filer/abstract_sql/abstract_sql_store.go
  2. 5
      weed/s3api/s3api_object_handlers.go
  3. 22
      weed/s3api/s3api_object_handlers_list.go

27
weed/filer/abstract_sql/abstract_sql_store.go

@@ -341,10 +341,12 @@ func (store *AbstractSqlStore) ListRecursivePrefixedEntries(ctx context.Context,
}
shortDir := string(shortPath)
var dirPrefix string
if shortDir == "/" || prefix == "" {
dirPrefix = fmt.Sprintf("%s%s%%", shortDir, prefix)
} else {
dirPrefix = fmt.Sprintf("%s/%s%%", shortDir, prefix)
if !delimiter {
if shortDir == "/" || prefix == "" {
dirPrefix = fmt.Sprintf("%s%s%%", shortDir, prefix)
} else {
dirPrefix = fmt.Sprintf("%s/%s%%", shortDir, prefix)
}
}
glog.V(0).Infof("ListRecursivePrefixedEntries %s lastFileName %s shortPath %v, prefix %v, startFileName %s, limit %d, delimiter %v, dirPrefix %s", string(dirPath), lastFileName, string(shortPath), prefix, startFileName, limit, delimiter, dirPrefix)
rows, err := db.QueryContext(ctx, store.GetSqlListRecursive(bucket), startFileName, util.HashStringToLong(shortDir), prefix+"%", dirPrefix, limit+2)
@@ -376,22 +378,11 @@
glog.Errorf("scan decode %s : %v", entry.FullPath, err)
return lastFileName, fmt.Errorf("scan decode %s : %v", entry.FullPath, err)
}
if !delimiter && entry.IsDirectory() {
glog.V(0).Infof("scan isDir %v skip %v", entry.IsDirectory(), entry.FullPath)
isDirectory := entry.IsDirectory() && entry.Attr.Mime == "" && entry.Attr.FileSize == 0
if !delimiter && isDirectory {
glog.V(0).Infof("scan is filer dir %v skip %v as object key", isDirectory, entry.FullPath)
continue
}
// TODO(test_bucket_listv2_delimiter_prefix): move the start-from-prefix filtering into the SQL query, because in extreme cases more keys need to be skipped than the limit allows
if delimiter {
if shortDir == fileName && !(entry.IsDirectory() && entry.Attr.Mime != "") {
// glog.V(0).Infof("scan is not DirKey %v skip %v", entry.IsDirectory(), entry.FullPath)
continue
}
if shortDir != dir && (!entry.IsDirectory() || (len(startFileName) > 0 && strings.HasPrefix(dir, startFileName))) {
// glog.V(0).Infof("scan isDir %v skip %v", entry.IsDirectory(), entry.FullPath)
continue
}
}
if !eachEntryFunc(entry) {
break
}

5
weed/s3api/s3api_object_handlers.go

@@ -72,7 +72,7 @@ func removeDuplicateSlashes(object string) string {
return result.String()
}
func newListEntry(entry *filer_pb.Entry, key string, dir string, name string, bucketPrefix string, fetchOwner bool, isDirectory bool) (listEntry ListEntry) {
func newListEntry(entry *filer_pb.Entry, key string, dir string, name string, bucketPrefix string, fetchOwner bool, isDirectory bool, encodingTypeUrl bool) (listEntry ListEntry) {
storageClass := "STANDARD"
if v, ok := entry.Extended[s3_constants.AmzStorageClass]; ok {
storageClass = string(v)
@@ -84,6 +84,9 @@ func newListEntry(entry *filer_pb.Entry, key string, dir string, name string, bu
if key == "" {
key = fmt.Sprintf(keyFormat, dir, name)[len(bucketPrefix):]
}
if encodingTypeUrl {
key = urlPathEscape(key)
}
listEntry = ListEntry{
Key: key,
LastModified: time.Unix(entry.Attributes.Mtime, 0).UTC(),

22
weed/s3api/s3api_object_handlers_list.go

@@ -12,7 +12,6 @@ import (
"io"
"net/http"
"net/url"
"sort"
"strconv"
"strings"
)
@@ -193,20 +192,26 @@ func (s3a *S3ApiServer) listFilerEntries(bucket string, originalPrefix string, m
if delimiter == "/" {
if entry.IsDirectoryKeyObject() {
// glog.V(0).Infof("append IsDirectoryKeyObject %s", key+"/")
contents = append(contents, newListEntry(entry, key+"/", "", "", bucketPrefix, fetchOwner, false))
contents = append(contents, newListEntry(entry, key+"/", "", "", bucketPrefix, fetchOwner, false, encodingTypeUrl))
cursor.maxKeys--
return
}
if entry.IsDirectory {
// glog.V(0).Infof("append commonPrefixes %s", key+"/")
var prefixKey string
if encodingTypeUrl {
prefixKey = urlPathEscape(key + "/")
} else {
prefixKey = key + "/"
}
commonPrefixes = append(commonPrefixes, PrefixEntry{
Prefix: key + "/",
Prefix: prefixKey,
})
cursor.maxKeys--
return
}
}
contents = append(contents, newListEntry(entry, key, "", "", bucketPrefix, fetchOwner, false))
contents = append(contents, newListEntry(entry, key, "", "", bucketPrefix, fetchOwner, false, encodingTypeUrl))
cursor.maxKeys--
},
)
@@ -229,7 +234,7 @@ func (s3a *S3ApiServer) listFilerEntries(bucket string, originalPrefix string, m
dirName, entryName, prefixName := entryUrlEncode(dir, entry.Name, encodingTypeUrl)
if entry.IsDirectory {
if entry.IsDirectoryKeyObject() {
contents = append(contents, newListEntry(entry, "", dirName, entryName, bucketPrefix, fetchOwner, true))
contents = append(contents, newListEntry(entry, "", dirName, entryName, bucketPrefix, fetchOwner, true, false))
cursor.maxKeys--
// https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListObjectsV2.html
} else if delimiter == "/" { // A response can contain CommonPrefixes only if you specify a delimiter.
@@ -272,7 +277,7 @@ func (s3a *S3ApiServer) listFilerEntries(bucket string, originalPrefix string, m
}
}
if !delimiterFound {
contents = append(contents, newListEntry(entry, "", dirName, entryName, bucketPrefix, fetchOwner, false))
contents = append(contents, newListEntry(entry, "", dirName, entryName, bucketPrefix, fetchOwner, false, false))
cursor.maxKeys--
}
}
@@ -307,9 +312,8 @@ func (s3a *S3ApiServer) listFilerEntries(bucket string, originalPrefix string, m
CommonPrefixes: commonPrefixes,
}
if encodingTypeUrl {
sort.Slice(response.CommonPrefixes, func(i, j int) bool {
return response.CommonPrefixes[i].Prefix < response.CommonPrefixes[j].Prefix
})
// TODO: this sort was previously required to pass test_bucket_listv2_encoding_basic; kept commented out for reference
// sort.Slice(response.CommonPrefixes, func(i, j int) bool { return response.CommonPrefixes[i].Prefix < response.CommonPrefixes[j].Prefix })
response.EncodingType = s3.EncodingTypeUrl
}
return nil

Loading…
Cancel
Save