add s3 upload, and removing mono and multi part upload analyzer
removing mono and multi part upload analyzer, which were used just to determine the file name
pull/693/head
Chris Lu
7 years ago
8 changed files with 139 additions and 59 deletions
5   weed/operation/assign_file_id.go
8   weed/s3api/s3api_errors.go
12  weed/s3api/s3api_handlers.go
90  weed/s3api/s3api_object_handlers.go
4   weed/s3api/s3api_server.go
31  weed/server/filer_server_handlers_write.go
18  weed/server/filer_server_handlers_write_monopart.go
30  weed/server/filer_server_handlers_write_multipart.go
weed/s3api/s3api_object_handlers.go
@@ -0,0 +1,90 @@
package s3api

import (
	"net/http"
	"github.com/chrislusf/seaweedfs/weed/glog"
	"fmt"
	"github.com/gorilla/mux"
	"io/ioutil"
	"encoding/json"
)

var (
	client *http.Client
)

func init() {
	client = &http.Client{Transport: &http.Transport{
		MaxIdleConnsPerHost: 1024,
	}}
}

type UploadResult struct {
	Name  string `json:"name,omitempty"`
	Size  uint32 `json:"size,omitempty"`
	Error string `json:"error,omitempty"`
}

func (s3a *S3ApiServer) PutObjectHandler(w http.ResponseWriter, r *http.Request) {

	// http://docs.aws.amazon.com/AmazonS3/latest/dev/UploadingObjects.html

	vars := mux.Vars(r)
	bucket := vars["bucket"]
	object := vars["object"]

	_, err := validateContentMd5(r.Header)
	if err != nil {
		writeErrorResponse(w, ErrInvalidDigest, r.URL)
		return
	}

	// proxy the object body straight to the filer; the bucket name doubles as the collection
	uploadUrl := fmt.Sprintf("http://%s%s/%s/%s?collection=%s",
		s3a.option.Filer, s3a.option.BucketsPath, bucket, object, bucket)
	proxyReq, err := http.NewRequest("PUT", uploadUrl, r.Body)

	if err != nil {
		glog.Errorf("NewRequest %s: %v", uploadUrl, err)
		writeErrorResponse(w, ErrInternalError, r.URL)
		return
	}

	proxyReq.Header.Set("Host", s3a.option.Filer)
	proxyReq.Header.Set("X-Forwarded-For", r.RemoteAddr)

	// pass the original request headers through to the filer
	for header, values := range r.Header {
		for _, value := range values {
			proxyReq.Header.Add(header, value)
		}
	}

	resp, postErr := client.Do(proxyReq)

	if postErr != nil {
		glog.Errorf("post to filer: %v", postErr)
		writeErrorResponse(w, ErrInternalError, r.URL)
		return
	}
	defer resp.Body.Close()

	resp_body, ra_err := ioutil.ReadAll(resp.Body)
	if ra_err != nil {
		glog.Errorf("upload to filer response read: %v", ra_err)
		writeErrorResponse(w, ErrInternalError, r.URL)
		return
	}
	var ret UploadResult
	unmarshal_err := json.Unmarshal(resp_body, &ret)
	if unmarshal_err != nil {
		glog.Errorf("failing to read upload to %s : %v", uploadUrl, string(resp_body))
		writeErrorResponse(w, ErrInternalError, r.URL)
		return
	}
	if ret.Error != "" {
		glog.Errorf("upload to filer error: %v", ret.Error)
		writeErrorResponse(w, ErrInternalError, r.URL)
		return
	}

	writeSuccessResponseEmpty(w)
}
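The handler above simply proxies the S3 PUT body to the filer and reports the filer's JSON result. A minimal sketch of exercising it with a plain HTTP client, assuming the S3 gateway listens on localhost:8333 and that s3api_server.go routes /{bucket}/{object} to PutObjectHandler (the address, bucket, and object key here are assumptions for illustration, not part of this commit):

// Hypothetical client for the new PUT path; endpoint and names are illustrative.
package main

import (
	"bytes"
	"fmt"
	"net/http"
)

func main() {
	body := bytes.NewReader([]byte("hello from the s3 gateway"))

	req, err := http.NewRequest("PUT", "http://localhost:8333/mybucket/docs/hello.txt", body)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "text/plain")

	// PutObjectHandler would forward this body to
	// http://<filer><bucketsPath>/mybucket/docs/hello.txt?collection=mybucket
	// and reply with an empty 200 once the filer accepts the upload.
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}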
weed/server/filer_server_handlers_write_multipart.go
@@ -1,39 +1,13 @@
package weed_server

import (
	"bytes"
	"io/ioutil"
	"net/http"
	"strings"

	"github.com/chrislusf/seaweedfs/weed/glog"
	"github.com/chrislusf/seaweedfs/weed/storage"
)

func (fs *FilerServer) multipartUploadAnalyzer(w http.ResponseWriter, r *http.Request, replication, collection string, dataCenter string) (fileId, urlLocation string, err error) {
func (fs *FilerServer) multipartUploadAnalyzer(w http.ResponseWriter, r *http.Request, replication, collection string, dataCenter string) (path string, err error) {
	//Default handle way for http multipart
	if r.Method == "PUT" {
		buf, _ := ioutil.ReadAll(r.Body)
		r.Body = ioutil.NopCloser(bytes.NewBuffer(buf))
		fileName, _, _, _, _, _, _, _, pe := storage.ParseUpload(r)
		if pe != nil {
			glog.V(0).Infoln("failing to parse post body", pe.Error())
			writeJsonError(w, r, http.StatusInternalServerError, pe)
			err = pe
			return
		}
		//reconstruct http request body for following new request to volume server
		r.Body = ioutil.NopCloser(bytes.NewBuffer(buf))

		path := r.URL.Path
		if strings.HasSuffix(path, "/") {
			if fileName != "" {
				path += fileName
			}
		}
		fileId, urlLocation, err = fs.queryFileInfoByPath(w, r, path)
	} else {
		fileId, urlLocation, err = fs.assignNewFileInfo(w, r, replication, collection, dataCenter)
		path = r.URL.Path
	}
	return
}
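With the analyzer no longer assigning file IDs, what survives is only path derivation: a PUT whose URL ends in "/" gets the file name parsed from the multipart body appended, and any other path is used verbatim. A standalone sketch of that rule, where derivePath is an illustrative helper and not part of the server code:

// derivePath mirrors the trailing-slash rule in the hunk above.
package main

import (
	"fmt"
	"strings"
)

// derivePath appends the parsed multipart file name only when the
// request path ends in "/".
func derivePath(urlPath, fileName string) string {
	path := urlPath
	if strings.HasSuffix(path, "/") && fileName != "" {
		path += fileName
	}
	return path
}

func main() {
	fmt.Println(derivePath("/buckets/mybucket/photos/", "cat.jpg")) // /buckets/mybucket/photos/cat.jpg
	fmt.Println(derivePath("/buckets/mybucket/photos/cat.jpg", "")) // unchanged
}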