
fix test_bucket_listv2_encoding_basic without sort

Branch: pull/5580/head
Author: Konstantin Lebedev (8 months ago)
Commit: 4e70053263
Changed files:
  1. weed/filer/abstract_sql/abstract_sql_store.go (19 changed lines)
  2. weed/s3api/s3api_object_handlers.go (5 changed lines)
  3. weed/s3api/s3api_object_handlers_list.go (22 changed lines)

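Note on the approach: the previous workaround for test_bucket_listv2_encoding_basic URL-escaped the CommonPrefixes and then re-sorted them before responding. With this change the escaping happens while entries are appended (newListEntry and the CommonPrefixes branch receive an encodingTypeUrl flag), so the response keeps the order in which the filer returns entries and the post-hoc sort can be dropped. Below is a minimal, standalone sketch of the kind of per-segment escaping urlPathEscape is assumed to perform (split on "/", escape each segment with net/url's PathEscape); the real helper in weed/s3api may differ in details, e.g. in how it treats "+".

    // Illustrative only; escapeObjectKey is a hypothetical stand-in for
    // urlPathEscape, not code from this commit.
    package main

    import (
    	"fmt"
    	"net/url"
    	"strings"
    )

    // escapeObjectKey percent-encodes each "/"-separated segment of an object
    // key while keeping the "/" separators literal, which is the shape of keys
    // returned when a listing is requested with EncodingType=url.
    func escapeObjectKey(key string) string {
    	parts := strings.Split(key, "/")
    	for i, p := range parts {
    		parts[i] = url.PathEscape(p)
    	}
    	return strings.Join(parts, "/")
    }

    func main() {
    	fmt.Println(escapeObjectKey("quux ab/thud")) // prints quux%20ab/thud
    }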
weed/filer/abstract_sql/abstract_sql_store.go (19 changed lines)

@@ -341,11 +341,13 @@ func (store *AbstractSqlStore) ListRecursivePrefixedEntries(ctx context.Context,
 	}
 	shortDir := string(shortPath)
 	var dirPrefix string
+	if !delimiter {
 		if shortDir == "/" || prefix == "" {
 			dirPrefix = fmt.Sprintf("%s%s%%", shortDir, prefix)
 		} else {
 			dirPrefix = fmt.Sprintf("%s/%s%%", shortDir, prefix)
 		}
+	}
 	glog.V(0).Infof("ListRecursivePrefixedEntries %s lastFileName %s shortPath %v, prefix %v, startFileName %s, limit %d, delimiter %v, dirPrefix %s", string(dirPath), lastFileName, string(shortPath), prefix, startFileName, limit, delimiter, dirPrefix)
 	rows, err := db.QueryContext(ctx, store.GetSqlListRecursive(bucket), startFileName, util.HashStringToLong(shortDir), prefix+"%", dirPrefix, limit+2)
 	if err != nil {
@@ -376,22 +378,11 @@ func (store *AbstractSqlStore) ListRecursivePrefixedEntries(ctx context.Context,
 			glog.Errorf("scan decode %s : %v", entry.FullPath, err)
 			return lastFileName, fmt.Errorf("scan decode %s : %v", entry.FullPath, err)
 		}
-		if !delimiter && entry.IsDirectory() {
-			glog.V(0).Infof("scan isDir %v skip %v", entry.IsDirectory(), entry.FullPath)
-			continue
-		}
-		// Todo test_bucket_listv2_delimiter_prefix move start from prefix to SQL because in extreme cases, where there are more keys that need to be skipped than the limit
-		if delimiter {
-			if shortDir == fileName && !(entry.IsDirectory() && entry.Attr.Mime != "") {
-				// glog.V(0).Infof("scan is not DirKey %v skip %v", entry.IsDirectory(), entry.FullPath)
+		isDirectory := entry.IsDirectory() && entry.Attr.Mime == "" && entry.Attr.FileSize == 0
+		if !delimiter && isDirectory {
+			glog.V(0).Infof("scan is filer dir %v skip %v as object key", isDirectory, entry.FullPath)
 			continue
 		}
-			if shortDir != dir && (!entry.IsDirectory() || (len(startFileName) > 0 && strings.HasPrefix(dir, startFileName))) {
-				// glog.V(0).Infof("scan isDir %v skip %v", entry.IsDirectory(), entry.FullPath)
-				continue
-			}
-		}
 		if !eachEntryFunc(entry) {
 			break
 		}

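For reference, the dirPrefix pattern that feeds the SQL LIKE clause above is now only built when no delimiter is given, i.e. for the fully recursive listing; with a delimiter it stays empty. A standalone sketch of that pattern construction (buildDirPrefix is a hypothetical name mirroring the hunk above, not a function in the repository); note that "%%" in the format string is a literal "%", the SQL wildcard:

    package main

    import "fmt"

    // buildDirPrefix reproduces the dirPrefix logic from the first hunk:
    // the result is handed to a LIKE clause, so the trailing "%" matches
    // every directory path underneath shortDir that starts with prefix.
    func buildDirPrefix(shortDir, prefix string) string {
    	if shortDir == "/" || prefix == "" {
    		return fmt.Sprintf("%s%s%%", shortDir, prefix)
    	}
    	return fmt.Sprintf("%s/%s%%", shortDir, prefix)
    }

    func main() {
    	fmt.Println(buildDirPrefix("/", ""))       // prints /%
    	fmt.Println(buildDirPrefix("/dir", "foo")) // prints /dir/foo%
    }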
weed/s3api/s3api_object_handlers.go (5 changed lines)

@@ -72,7 +72,7 @@ func removeDuplicateSlashes(object string) string {
 	return result.String()
 }
 
-func newListEntry(entry *filer_pb.Entry, key string, dir string, name string, bucketPrefix string, fetchOwner bool, isDirectory bool) (listEntry ListEntry) {
+func newListEntry(entry *filer_pb.Entry, key string, dir string, name string, bucketPrefix string, fetchOwner bool, isDirectory bool, encodingTypeUrl bool) (listEntry ListEntry) {
 	storageClass := "STANDARD"
 	if v, ok := entry.Extended[s3_constants.AmzStorageClass]; ok {
 		storageClass = string(v)
@@ -84,6 +84,9 @@ func newListEntry(entry *filer_pb.Entry, key string, dir string, name string, bu
 	if key == "" {
 		key = fmt.Sprintf(keyFormat, dir, name)[len(bucketPrefix):]
 	}
+	if encodingTypeUrl {
+		key = urlPathEscape(key)
+	}
 	listEntry = ListEntry{
 		Key:          key,
 		LastModified: time.Unix(entry.Attributes.Mtime, 0).UTC(),

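Because the escape now happens inside newListEntry, the ListEntry already holds the percent-encoded key by the time the response is marshalled to XML; the encoder simply writes the string it is given. A rough illustration of that end state (struct and field names below are placeholders, and unlike this sketch the real code escapes per path segment rather than the whole key):

    package main

    import (
    	"encoding/xml"
    	"fmt"
    	"net/url"
    )

    // entryXML is a stand-in for the response struct; only Key matters here.
    type entryXML struct {
    	Key string `xml:"Key"`
    }

    func main() {
    	raw := "quux ab/thud"
    	escaped := url.PathEscape(raw) // quux%20ab%2Fthud (whole-key escape, for illustration)
    	out, _ := xml.Marshal(entryXML{Key: escaped})
    	fmt.Println(string(out)) // <entryXML><Key>quux%20ab%2Fthud</Key></entryXML>
    }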
weed/s3api/s3api_object_handlers_list.go (22 changed lines)

@@ -12,7 +12,6 @@ import (
 	"io"
 	"net/http"
 	"net/url"
-	"sort"
 	"strconv"
 	"strings"
 )
@@ -193,20 +192,26 @@ func (s3a *S3ApiServer) listFilerEntries(bucket string, originalPrefix string, m
 			if delimiter == "/" {
 				if entry.IsDirectoryKeyObject() {
 					// glog.V(0).Infof("append IsDirectoryKeyObject %s", key+"/")
-					contents = append(contents, newListEntry(entry, key+"/", "", "", bucketPrefix, fetchOwner, false))
+					contents = append(contents, newListEntry(entry, key+"/", "", "", bucketPrefix, fetchOwner, false, encodingTypeUrl))
 					cursor.maxKeys--
 					return
 				}
 				if entry.IsDirectory {
 					// glog.V(0).Infof("append commonPrefixes %s", key+"/")
+					var prefixKey string
+					if encodingTypeUrl {
+						prefixKey = urlPathEscape(key + "/")
+					} else {
+						prefixKey = key + "/"
+					}
 					commonPrefixes = append(commonPrefixes, PrefixEntry{
-						Prefix: key + "/",
+						Prefix: prefixKey,
 					})
 					cursor.maxKeys--
 					return
 				}
 			}
-			contents = append(contents, newListEntry(entry, key, "", "", bucketPrefix, fetchOwner, false))
+			contents = append(contents, newListEntry(entry, key, "", "", bucketPrefix, fetchOwner, false, encodingTypeUrl))
 			cursor.maxKeys--
 		},
 	)
@@ -229,7 +234,7 @@ func (s3a *S3ApiServer) listFilerEntries(bucket string, originalPrefix string, m
 			dirName, entryName, prefixName := entryUrlEncode(dir, entry.Name, encodingTypeUrl)
 			if entry.IsDirectory {
 				if entry.IsDirectoryKeyObject() {
-					contents = append(contents, newListEntry(entry, "", dirName, entryName, bucketPrefix, fetchOwner, true))
+					contents = append(contents, newListEntry(entry, "", dirName, entryName, bucketPrefix, fetchOwner, true, false))
 					cursor.maxKeys--
 					// https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListObjectsV2.html
 				} else if delimiter == "/" { // A response can contain CommonPrefixes only if you specify a delimiter.
@@ -272,7 +277,7 @@ func (s3a *S3ApiServer) listFilerEntries(bucket string, originalPrefix string, m
 				}
 			}
 			if !delimiterFound {
-				contents = append(contents, newListEntry(entry, "", dirName, entryName, bucketPrefix, fetchOwner, false))
+				contents = append(contents, newListEntry(entry, "", dirName, entryName, bucketPrefix, fetchOwner, false, false))
 				cursor.maxKeys--
 			}
 		}
@@ -307,9 +312,8 @@ func (s3a *S3ApiServer) listFilerEntries(bucket string, originalPrefix string, m
 		CommonPrefixes: commonPrefixes,
 	}
 	if encodingTypeUrl {
-		sort.Slice(response.CommonPrefixes, func(i, j int) bool {
-			return response.CommonPrefixes[i].Prefix < response.CommonPrefixes[j].Prefix
-		})
+		// Todo used for pass test_bucket_listv2_encoding_basic
+		// sort.Slice(response.CommonPrefixes, func(i, j int) bool { return response.CommonPrefixes[i].Prefix < response.CommonPrefixes[j].Prefix })
 		response.EncodingType = s3.EncodingTypeUrl
 	}
 	return nil

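One way to exercise the new behaviour from the client side is to request a listing with EncodingType=url and unescape what comes back; keys such as "quux ab/thud" should arrive percent-encoded, and CommonPrefixes should already be in the server's listing order without any re-sort. A sketch using aws-sdk-go (endpoint, bucket name and credentials are placeholders, not part of the commit):

    package main

    import (
    	"fmt"
    	"log"
    	"net/url"

    	"github.com/aws/aws-sdk-go/aws"
    	"github.com/aws/aws-sdk-go/aws/credentials"
    	"github.com/aws/aws-sdk-go/aws/session"
    	"github.com/aws/aws-sdk-go/service/s3"
    )

    func main() {
    	sess := session.Must(session.NewSession(&aws.Config{
    		Endpoint:         aws.String("http://localhost:8333"), // assumed local SeaweedFS S3 gateway
    		Region:           aws.String("us-east-1"),
    		Credentials:      credentials.NewStaticCredentials("any", "any", ""),
    		S3ForcePathStyle: aws.Bool(true),
    	}))
    	svc := s3.New(sess)

    	out, err := svc.ListObjectsV2(&s3.ListObjectsV2Input{
    		Bucket:       aws.String("test-bucket"),
    		Delimiter:    aws.String("/"),
    		EncodingType: aws.String(s3.EncodingTypeUrl),
    	})
    	if err != nil {
    		log.Fatal(err)
    	}
    	// CommonPrefixes come back percent-encoded, in server order.
    	for _, p := range out.CommonPrefixes {
    		fmt.Println("prefix:", aws.StringValue(p.Prefix))
    	}
    	// Object keys decode back to the original names on the client.
    	for _, obj := range out.Contents {
    		decoded, _ := url.QueryUnescape(aws.StringValue(obj.Key))
    		fmt.Printf("key escaped=%q decoded=%q\n", aws.StringValue(obj.Key), decoded)
    	}
    }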