
s3: copy object API needs to escape special characters

fix https://github.com/chrislusf/seaweedfs/issues/2482
chrislu committed 3 years ago · branch pull/2500/head · parent commit 5c3b783310
2 changed files with 4 additions and 4 deletions:
  weed/s3api/filer_multipart.go (2)
  weed/s3api/s3api_object_copy_handlers.go (6)
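
Why the escaping matters: these handlers build filer URLs by plain string concatenation, so any URL-significant character in an object key (space, '#', '%', '?') is interpreted as URL syntax instead of path content. A minimal, self-contained illustration of the failure mode, using a hypothetical key and filer address (not taken from this commit):

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Hypothetical object key containing URL-significant characters.
	srcObject := "/reports/q4 summary#draft.txt"

	// Naive concatenation, as CopyObjectHandler did before this fix.
	raw := "http://filer:8888/buckets/mybucket" + srcObject

	u, err := url.Parse(raw)
	if err != nil {
		panic(err)
	}
	// '#' is parsed as the start of a fragment, so the part of the
	// key after it never reaches the filer.
	fmt.Println(u.Path)     // /buckets/mybucket/reports/q4 summary
	fmt.Println(u.Fragment) // draft.txt
}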

weed/s3api/filer_multipart.go (2 changes: 1 addition, 1 deletion)

@@ -142,7 +142,7 @@ func (s3a *S3ApiServer) completeMultipartUpload(input *s3.CompleteMultipartUploa
 	output = &CompleteMultipartUploadResult{
 		CompleteMultipartUploadOutput: s3.CompleteMultipartUploadOutput{
-			Location: aws.String(fmt.Sprintf("http://%s%s/%s", s3a.option.Filer.ToHttpAddress(), dirName, entryName)),
+			Location: aws.String(fmt.Sprintf("http://%s%s/%s", s3a.option.Filer.ToHttpAddress(), urlPathEscape(dirName), urlPathEscape(entryName))),
 			Bucket:   input.Bucket,
 			ETag:     aws.String("\"" + filer.ETagChunks(finalParts) + "\""),
 			Key:      objectKey(input.Key),

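The fix routes each user-controlled path component through urlPathEscape before embedding it in the URL. The helper itself is outside this diff; a minimal sketch of what it plausibly does, assuming it percent-encodes every path segment with net/url's PathEscape while keeping the '/' separators (the real helper in weed/s3api may differ):

package s3api

import (
	"net/url"
	"strings"
)

// urlPathEscape percent-encodes each segment of an object path
// (space, '#', '%', '?', ...) while leaving the '/' separators
// alone, so the escaped string still addresses the same filer path.
// Sketch only; not copied from the SeaweedFS source.
func urlPathEscape(object string) string {
	var escapedParts []string
	for _, part := range strings.Split(object, "/") {
		escapedParts = append(escapedParts, url.PathEscape(part))
	}
	return strings.Join(escapedParts, "/")
}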
weed/s3api/s3api_object_copy_handlers.go (6 changes: 3 additions, 3 deletions)

@@ -69,9 +69,9 @@ func (s3a *S3ApiServer) CopyObjectHandler(w http.ResponseWriter, r *http.Request
 	}
 	dstUrl := fmt.Sprintf("http://%s%s/%s%s?collection=%s",
-		s3a.option.Filer.ToHttpAddress(), s3a.option.BucketsPath, dstBucket, dstObject, dstBucket)
+		s3a.option.Filer.ToHttpAddress(), s3a.option.BucketsPath, dstBucket, urlPathEscape(dstObject), dstBucket)
 	srcUrl := fmt.Sprintf("http://%s%s/%s%s",
-		s3a.option.Filer.ToHttpAddress(), s3a.option.BucketsPath, srcBucket, srcObject)
+		s3a.option.Filer.ToHttpAddress(), s3a.option.BucketsPath, srcBucket, urlPathEscape(srcObject))

 	_, _, resp, err := util.DownloadFile(srcUrl, "")
 	if err != nil {
@@ -154,7 +154,7 @@ func (s3a *S3ApiServer) CopyObjectPartHandler(w http.ResponseWriter, r *http.Req
 	dstUrl := fmt.Sprintf("http://%s%s/%s/%04d.part?collection=%s",
 		s3a.option.Filer.ToHttpAddress(), s3a.genUploadsFolder(dstBucket), uploadID, partID, dstBucket)
 	srcUrl := fmt.Sprintf("http://%s%s/%s%s",
-		s3a.option.Filer.ToHttpAddress(), s3a.option.BucketsPath, srcBucket, srcObject)
+		s3a.option.Filer.ToHttpAddress(), s3a.option.BucketsPath, srcBucket, urlPathEscape(srcObject))
 	dataReader, err := util.ReadUrlAsReaderCloser(srcUrl, rangeHeader)
 	if err != nil {
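
With escaping in place, the problematic key from the earlier example survives the round trip, and the query string (?collection=...) is appended after the escaped path, so characters in the key can no longer corrupt it. A self-contained check of the segment-wise escaping (same hypothetical key):

package main

import (
	"fmt"
	"net/url"
	"strings"
)

func main() {
	// Same hypothetical key, escaped segment by segment.
	srcObject := "/reports/q4 summary#draft.txt"
	parts := strings.Split(srcObject, "/")
	for i, p := range parts {
		parts[i] = url.PathEscape(p)
	}
	fmt.Println(strings.Join(parts, "/"))
	// Output: /reports/q4%20summary%23draft.txt
}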
