Chris Lu
5 years ago
10 changed files with 354 additions and 212 deletions
-
12weed/operation/upload_content.go
-
14weed/server/common.go
-
11weed/server/filer_server_handlers_read.go
-
19weed/server/filer_server_handlers_write.go
-
22weed/server/filer_server_handlers_write_autochunk.go
-
103weed/server/filer_server_handlers_write_cipher.go
-
74weed/storage/needle/needle.go
-
118weed/storage/needle/needle_parse_multipart.go
-
166weed/storage/needle/needle_parse_upload.go
-
21weed/util/cipher.go
@ -0,0 +1,103 @@ |
|||||
|
package weed_server |
||||
|
|
||||
|
import ( |
||||
|
"bytes" |
||||
|
"context" |
||||
|
"fmt" |
||||
|
"net/http" |
||||
|
"strings" |
||||
|
"time" |
||||
|
|
||||
|
"github.com/chrislusf/seaweedfs/weed/filer2" |
||||
|
"github.com/chrislusf/seaweedfs/weed/glog" |
||||
|
"github.com/chrislusf/seaweedfs/weed/operation" |
||||
|
"github.com/chrislusf/seaweedfs/weed/pb/filer_pb" |
||||
|
"github.com/chrislusf/seaweedfs/weed/storage/needle" |
||||
|
"github.com/chrislusf/seaweedfs/weed/util" |
||||
|
) |
||||
|
|
||||
|
// handling single chunk POST or PUT upload
|
||||
|
func (fs *FilerServer) encrypt(ctx context.Context, w http.ResponseWriter, r *http.Request, |
||||
|
replication string, collection string, dataCenter string) (filerResult *FilerPostResult, err error) { |
||||
|
|
||||
|
fileId, urlLocation, auth, err := fs.assignNewFileInfo(w, r, replication, collection, dataCenter) |
||||
|
|
||||
|
if err != nil || fileId == "" || urlLocation == "" { |
||||
|
return nil, fmt.Errorf("fail to allocate volume for %s, collection:%s, datacenter:%s", r.URL.Path, collection, dataCenter) |
||||
|
} |
||||
|
|
||||
|
glog.V(4).Infof("write %s to %v", r.URL.Path, urlLocation) |
||||
|
|
||||
|
// Note: gzip(cipher(data)), cipher data first, then gzip
|
||||
|
|
||||
|
sizeLimit := int64(fs.option.MaxMB) * 1024 * 1024 |
||||
|
|
||||
|
pu, err := needle.ParseUpload(r, sizeLimit) |
||||
|
data := pu.Data |
||||
|
uncompressedData := pu.Data |
||||
|
cipherKey := util.GenCipherKey() |
||||
|
if pu.IsGzipped { |
||||
|
uncompressedData = pu.UncompressedData |
||||
|
data, err = util.Encrypt(pu.UncompressedData, cipherKey) |
||||
|
if err != nil { |
||||
|
return nil, fmt.Errorf("encrypt input: %v", err) |
||||
|
} |
||||
|
} |
||||
|
if pu.MimeType == "" { |
||||
|
pu.MimeType = http.DetectContentType(uncompressedData) |
||||
|
} |
||||
|
|
||||
|
uploadResult, uploadError := operation.Upload(urlLocation, pu.FileName, true, bytes.NewReader(data), pu.IsGzipped, "", pu.PairMap, auth) |
||||
|
if uploadError != nil { |
||||
|
return nil, fmt.Errorf("upload to volume server: %v", uploadError) |
||||
|
} |
||||
|
|
||||
|
// Save to chunk manifest structure
|
||||
|
fileChunks := []*filer_pb.FileChunk{ |
||||
|
{ |
||||
|
FileId: fileId, |
||||
|
Offset: 0, |
||||
|
Size: uint64(uploadResult.Size), |
||||
|
Mtime: time.Now().UnixNano(), |
||||
|
ETag: uploadResult.ETag, |
||||
|
CipherKey: uploadResult.CipherKey, |
||||
|
}, |
||||
|
} |
||||
|
|
||||
|
path := r.URL.Path |
||||
|
if strings.HasSuffix(path, "/") { |
||||
|
if pu.FileName != "" { |
||||
|
path += pu.FileName |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
entry := &filer2.Entry{ |
||||
|
FullPath: filer2.FullPath(path), |
||||
|
Attr: filer2.Attr{ |
||||
|
Mtime: time.Now(), |
||||
|
Crtime: time.Now(), |
||||
|
Mode: 0660, |
||||
|
Uid: OS_UID, |
||||
|
Gid: OS_GID, |
||||
|
Replication: replication, |
||||
|
Collection: collection, |
||||
|
TtlSec: int32(util.ParseInt(r.URL.Query().Get("ttl"), 0)), |
||||
|
Mime: pu.MimeType, |
||||
|
}, |
||||
|
Chunks: fileChunks, |
||||
|
} |
||||
|
|
||||
|
filerResult = &FilerPostResult{ |
||||
|
Name: pu.FileName, |
||||
|
Size: int64(pu.OriginalDataSize), |
||||
|
} |
||||
|
|
||||
|
if dbErr := fs.filer.CreateEntry(ctx, entry, false); dbErr != nil { |
||||
|
fs.filer.DeleteChunks(entry.Chunks) |
||||
|
err = dbErr |
||||
|
filerResult.Error = dbErr.Error() |
||||
|
return |
||||
|
} |
||||
|
|
||||
|
return |
||||
|
} |
@ -1,118 +0,0 @@ |
|||||
package needle |
|
||||
|
|
||||
import ( |
|
||||
"fmt" |
|
||||
"io" |
|
||||
"io/ioutil" |
|
||||
"mime" |
|
||||
"net/http" |
|
||||
"path" |
|
||||
"strconv" |
|
||||
"strings" |
|
||||
|
|
||||
"github.com/chrislusf/seaweedfs/weed/glog" |
|
||||
"github.com/chrislusf/seaweedfs/weed/util" |
|
||||
) |
|
||||
|
|
||||
func parseMultipart(r *http.Request, sizeLimit int64) ( |
|
||||
fileName string, data []byte, mimeType string, isGzipped bool, originalDataSize int, isChunkedFile bool, e error) { |
|
||||
defer func() { |
|
||||
if e != nil && r.Body != nil { |
|
||||
io.Copy(ioutil.Discard, r.Body) |
|
||||
r.Body.Close() |
|
||||
} |
|
||||
}() |
|
||||
form, fe := r.MultipartReader() |
|
||||
if fe != nil { |
|
||||
glog.V(0).Infoln("MultipartReader [ERROR]", fe) |
|
||||
e = fe |
|
||||
return |
|
||||
} |
|
||||
|
|
||||
//first multi-part item
|
|
||||
part, fe := form.NextPart() |
|
||||
if fe != nil { |
|
||||
glog.V(0).Infoln("Reading Multi part [ERROR]", fe) |
|
||||
e = fe |
|
||||
return |
|
||||
} |
|
||||
|
|
||||
fileName = part.FileName() |
|
||||
if fileName != "" { |
|
||||
fileName = path.Base(fileName) |
|
||||
} |
|
||||
|
|
||||
data, e = ioutil.ReadAll(io.LimitReader(part, sizeLimit+1)) |
|
||||
if e != nil { |
|
||||
glog.V(0).Infoln("Reading Content [ERROR]", e) |
|
||||
return |
|
||||
} |
|
||||
if len(data) == int(sizeLimit)+1 { |
|
||||
e = fmt.Errorf("file over the limited %d bytes", sizeLimit) |
|
||||
return |
|
||||
} |
|
||||
|
|
||||
//if the filename is empty string, do a search on the other multi-part items
|
|
||||
for fileName == "" { |
|
||||
part2, fe := form.NextPart() |
|
||||
if fe != nil { |
|
||||
break // no more or on error, just safely break
|
|
||||
} |
|
||||
|
|
||||
fName := part2.FileName() |
|
||||
|
|
||||
//found the first <file type> multi-part has filename
|
|
||||
if fName != "" { |
|
||||
data2, fe2 := ioutil.ReadAll(io.LimitReader(part2, sizeLimit+1)) |
|
||||
if fe2 != nil { |
|
||||
glog.V(0).Infoln("Reading Content [ERROR]", fe2) |
|
||||
e = fe2 |
|
||||
return |
|
||||
} |
|
||||
if len(data) == int(sizeLimit)+1 { |
|
||||
e = fmt.Errorf("file over the limited %d bytes", sizeLimit) |
|
||||
return |
|
||||
} |
|
||||
|
|
||||
//update
|
|
||||
data = data2 |
|
||||
fileName = path.Base(fName) |
|
||||
break |
|
||||
} |
|
||||
} |
|
||||
|
|
||||
originalDataSize = len(data) |
|
||||
|
|
||||
isChunkedFile, _ = strconv.ParseBool(r.FormValue("cm")) |
|
||||
|
|
||||
if !isChunkedFile { |
|
||||
|
|
||||
dotIndex := strings.LastIndex(fileName, ".") |
|
||||
ext, mtype := "", "" |
|
||||
if dotIndex > 0 { |
|
||||
ext = strings.ToLower(fileName[dotIndex:]) |
|
||||
mtype = mime.TypeByExtension(ext) |
|
||||
} |
|
||||
contentType := part.Header.Get("Content-Type") |
|
||||
if contentType != "" && mtype != contentType { |
|
||||
mimeType = contentType //only return mime type if not deductable
|
|
||||
mtype = contentType |
|
||||
} |
|
||||
|
|
||||
if part.Header.Get("Content-Encoding") == "gzip" { |
|
||||
if unzipped, e := util.UnGzipData(data); e == nil { |
|
||||
originalDataSize = len(unzipped) |
|
||||
} |
|
||||
isGzipped = true |
|
||||
} else if util.IsGzippable(ext, mtype, data) { |
|
||||
if compressedData, err := util.GzipData(data); err == nil { |
|
||||
if len(data) > len(compressedData) { |
|
||||
data = compressedData |
|
||||
isGzipped = true |
|
||||
} |
|
||||
} |
|
||||
} |
|
||||
} |
|
||||
|
|
||||
return |
|
||||
} |
|
@ -0,0 +1,166 @@ |
|||||
|
package needle |
||||
|
|
||||
|
import ( |
||||
|
"fmt" |
||||
|
"io" |
||||
|
"io/ioutil" |
||||
|
"mime" |
||||
|
"net/http" |
||||
|
"path" |
||||
|
"strconv" |
||||
|
"strings" |
||||
|
|
||||
|
"github.com/chrislusf/seaweedfs/weed/glog" |
||||
|
"github.com/chrislusf/seaweedfs/weed/util" |
||||
|
) |
||||
|
|
||||
|
// ParsedUpload is the normalized result of parsing an HTTP upload request
// (multipart POST or raw PUT body). It is produced by ParseUpload.
type ParsedUpload struct {
	FileName         string            // base name of the uploaded file; "" when none was supplied
	Data             []byte            // payload as stored, possibly gzip-compressed
	MimeType         string            // explicit content type from the request (multipart keeps it only when not deducible from the extension)
	PairMap          map[string]string // request headers whose names carry the PairNamePrefix prefix
	IsGzipped        bool              // true when Data is gzip-compressed
	OriginalDataSize int               // length of the uncompressed payload
	ModifiedTime     uint64            // "ts" form value, parsed as an unsigned integer timestamp
	Ttl              *TTL              // "ttl" form value, parsed via ReadTTL
	IsChunkedFile    bool              // "cm" form value: the payload is a chunk manifest
	UncompressedData []byte            // payload after gzip decompression; aliases Data when not compressed
}
||||
|
|
||||
|
func ParseUpload(r *http.Request, sizeLimit int64) (pu *ParsedUpload, e error) { |
||||
|
pu = &ParsedUpload{} |
||||
|
pu.PairMap = make(map[string]string) |
||||
|
for k, v := range r.Header { |
||||
|
if len(v) > 0 && strings.HasPrefix(k, PairNamePrefix) { |
||||
|
pu.PairMap[k] = v[0] |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
if r.Method == "POST" { |
||||
|
e = parseMultipart(r, sizeLimit, pu) |
||||
|
} else { |
||||
|
e = parsePut(r, sizeLimit, pu) |
||||
|
} |
||||
|
if e != nil { |
||||
|
return |
||||
|
} |
||||
|
|
||||
|
pu.ModifiedTime, _ = strconv.ParseUint(r.FormValue("ts"), 10, 64) |
||||
|
pu.Ttl, _ = ReadTTL(r.FormValue("ttl")) |
||||
|
|
||||
|
pu.OriginalDataSize = len(pu.Data) |
||||
|
pu.UncompressedData = pu.Data |
||||
|
if pu.IsGzipped { |
||||
|
if unzipped, e := util.UnGzipData(pu.Data); e == nil { |
||||
|
pu.OriginalDataSize = len(unzipped) |
||||
|
pu.UncompressedData = unzipped |
||||
|
} |
||||
|
} else if shouldGzip, _ := util.IsGzippableFileType("", pu.MimeType); shouldGzip { |
||||
|
if compressedData, err := util.GzipData(pu.Data); err == nil { |
||||
|
pu.Data = compressedData |
||||
|
pu.IsGzipped = true |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
return |
||||
|
} |
||||
|
|
||||
|
func parsePut(r *http.Request, sizeLimit int64, pu *ParsedUpload) (e error) { |
||||
|
pu.IsGzipped = r.Header.Get("Content-Encoding") == "gzip" |
||||
|
pu.MimeType = r.Header.Get("Content-Type") |
||||
|
pu.FileName = "" |
||||
|
pu.Data, e = ioutil.ReadAll(io.LimitReader(r.Body, sizeLimit+1)) |
||||
|
if e == io.EOF || int64(pu.OriginalDataSize) == sizeLimit+1 { |
||||
|
io.Copy(ioutil.Discard, r.Body) |
||||
|
} |
||||
|
r.Body.Close() |
||||
|
return nil |
||||
|
} |
||||
|
|
||||
|
func parseMultipart(r *http.Request, sizeLimit int64, pu *ParsedUpload) (e error) { |
||||
|
defer func() { |
||||
|
if e != nil && r.Body != nil { |
||||
|
io.Copy(ioutil.Discard, r.Body) |
||||
|
r.Body.Close() |
||||
|
} |
||||
|
}() |
||||
|
form, fe := r.MultipartReader() |
||||
|
if fe != nil { |
||||
|
glog.V(0).Infoln("MultipartReader [ERROR]", fe) |
||||
|
e = fe |
||||
|
return |
||||
|
} |
||||
|
|
||||
|
//first multi-part item
|
||||
|
part, fe := form.NextPart() |
||||
|
if fe != nil { |
||||
|
glog.V(0).Infoln("Reading Multi part [ERROR]", fe) |
||||
|
e = fe |
||||
|
return |
||||
|
} |
||||
|
|
||||
|
pu.FileName = part.FileName() |
||||
|
if pu.FileName != "" { |
||||
|
pu.FileName = path.Base(pu.FileName) |
||||
|
} |
||||
|
|
||||
|
pu.Data, e = ioutil.ReadAll(io.LimitReader(part, sizeLimit+1)) |
||||
|
if e != nil { |
||||
|
glog.V(0).Infoln("Reading Content [ERROR]", e) |
||||
|
return |
||||
|
} |
||||
|
if len(pu.Data) == int(sizeLimit)+1 { |
||||
|
e = fmt.Errorf("file over the limited %d bytes", sizeLimit) |
||||
|
return |
||||
|
} |
||||
|
|
||||
|
//if the filename is empty string, do a search on the other multi-part items
|
||||
|
for pu.FileName == "" { |
||||
|
part2, fe := form.NextPart() |
||||
|
if fe != nil { |
||||
|
break // no more or on error, just safely break
|
||||
|
} |
||||
|
|
||||
|
fName := part2.FileName() |
||||
|
|
||||
|
//found the first <file type> multi-part has filename
|
||||
|
if fName != "" { |
||||
|
data2, fe2 := ioutil.ReadAll(io.LimitReader(part2, sizeLimit+1)) |
||||
|
if fe2 != nil { |
||||
|
glog.V(0).Infoln("Reading Content [ERROR]", fe2) |
||||
|
e = fe2 |
||||
|
return |
||||
|
} |
||||
|
if len(data2) == int(sizeLimit)+1 { |
||||
|
e = fmt.Errorf("file over the limited %d bytes", sizeLimit) |
||||
|
return |
||||
|
} |
||||
|
|
||||
|
//update
|
||||
|
pu.Data = data2 |
||||
|
pu.FileName = path.Base(fName) |
||||
|
break |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
pu.IsChunkedFile, _ = strconv.ParseBool(r.FormValue("cm")) |
||||
|
|
||||
|
if !pu.IsChunkedFile { |
||||
|
|
||||
|
dotIndex := strings.LastIndex(pu.FileName, ".") |
||||
|
ext, mtype := "", "" |
||||
|
if dotIndex > 0 { |
||||
|
ext = strings.ToLower(pu.FileName[dotIndex:]) |
||||
|
mtype = mime.TypeByExtension(ext) |
||||
|
} |
||||
|
contentType := part.Header.Get("Content-Type") |
||||
|
if contentType != "" && contentType != "application/octet-stream" && mtype != contentType { |
||||
|
pu.MimeType = contentType //only return mime type if not deductable
|
||||
|
mtype = contentType |
||||
|
} |
||||
|
|
||||
|
pu.IsGzipped = part.Header.Get("Content-Encoding") == "gzip" |
||||
|
} |
||||
|
|
||||
|
return |
||||
|
} |
Write
Preview
Loading…
Cancel
Save
Reference in new issue