fix url parsing

pull/7481/head
chrislu 3 weeks ago
commit a26706fcac
1 changed file: weed/s3api/s3api_object_handlers_put.go (16 changes: 10 additions, 6 deletions)

@@ -283,18 +283,22 @@ func (s3a *S3ApiServer) putToFiler(r *http.Request, uploadUrl string, dataReader
 	}
 	// Parse the upload URL to extract the file path
-	// uploadUrl format: http://filer:8888/path/to/bucket/object
-	// The uploadUrl is URL-encoded by toFilerUrl(), so we need to decode it
-	filePath := strings.TrimPrefix(uploadUrl, "http://"+string(s3a.option.Filer))
+	// uploadUrl format: http://filer:8888/path/to/bucket/object (or https://, IPv6, etc.)
+	// Use proper URL parsing instead of string manipulation for robustness
+	parsedUrl, parseErr := url.Parse(uploadUrl)
+	if parseErr != nil {
+		glog.Errorf("putToFiler: failed to parse uploadUrl %q: %v", uploadUrl, parseErr)
+		return "", s3err.ErrInternalError, ""
+	}
 	// URL-decode the path to get the actual file path
 	// This is critical because toFilerUrl() encodes special characters like (, ), etc.
-	decodedPath, decodeErr := url.PathUnescape(filePath)
+	decodedPath, decodeErr := url.PathUnescape(parsedUrl.Path)
 	if decodeErr != nil {
-		glog.Errorf("putToFiler: failed to decode path %q: %v", filePath, decodeErr)
+		glog.Errorf("putToFiler: failed to decode path %q: %v", parsedUrl.Path, decodeErr)
 		return "", s3err.ErrInternalError, ""
 	}
-	filePath = decodedPath
+	filePath := decodedPath
 	// Step 1 & 2: Use auto-chunking to handle large files without OOM
 	// This splits large uploads into 8MB chunks, preventing memory issues on both S3 API and volume servers
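
For context, a minimal standalone sketch of the behavior difference this commit fixes. It is not the committed code: the host "filer:8888", the bucket, and the object key are invented for illustration. The old strings.TrimPrefix approach silently fails whenever the URL's scheme or host does not exactly match the hard-coded "http://" + filer address, while url.Parse handles any scheme and host form.

// Sketch: why url.Parse is more robust than strings.TrimPrefix here.
// All names below are illustrative; this is not the committed code.
package main

import (
	"fmt"
	"net/url"
	"strings"
)

func main() {
	// An upload URL whose path was percent-encoded by a helper like the
	// toFilerUrl() mentioned in the diff: "(" becomes %28, ")" becomes %29.
	uploadUrl := "https://filer:8888/buckets/b1/photo%28test%29.jpg"

	// Old approach: strip a hard-coded "http://" prefix. If the scheme is
	// https (or the host is written differently, e.g. a bracketed IPv6
	// address), the prefix never matches and the full URL leaks through
	// as the "file path".
	oldPath := strings.TrimPrefix(uploadUrl, "http://filer:8888")
	fmt.Println(oldPath) // https://filer:8888/buckets/b1/photo%28test%29.jpg

	// New approach, as in the commit: parse the URL and take its path.
	// Note that url.Parse already stores the decoded form in parsedUrl.Path,
	// so the extra PathUnescape is a no-op unless the decoded path still
	// contains a literal '%'.
	parsedUrl, parseErr := url.Parse(uploadUrl)
	if parseErr != nil {
		fmt.Println("parse error:", parseErr)
		return
	}
	decodedPath, decodeErr := url.PathUnescape(parsedUrl.Path)
	if decodeErr != nil {
		fmt.Println("decode error:", decodeErr)
		return
	}
	fmt.Println(decodedPath) // /buckets/b1/photo(test).jpg
}

The parse-then-take-path pattern also keeps the handler correct if the filer address is ever configured with a different scheme or host syntax, since nothing about the URL's prefix is assumed.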
