
`FilePart.Upload` use base name instead of full path

fix chunked file download error
pull/224/head
tnextday committed 9 years ago, commit d0e2475ece
3 changed files:
  1. go/operation/chunked_file.go (8 lines changed)
  2. go/operation/submit.go (7 lines changed)
  3. go/weed/weed_server/volume_server_handlers_read.go (16 lines changed)

go/operation/chunked_file.go (8 lines changed)

@@ -22,9 +22,9 @@ var (
 )

 type ChunkInfo struct {
-    Fid    string `json:"fid,omitempty"`
-    Offset int64  `json:"offset,omitempty"`
-    Size   int64  `json:"size,omitempty"`
+    Fid    string `json:"fid"`
+    Offset int64  `json:"offset"`
+    Size   int64  `json:"size"`
 }

 type ChunkList []*ChunkInfo

@@ -52,7 +52,7 @@ func (s ChunkList) Swap(i, j int) { s[i], s[j] = s[j], s[i] }

 func LoadChunkedManifest(buffer []byte) (*ChunkManifest, error) {
     cm := ChunkManifest{}
-    if e := json.Unmarshal(buffer, cm); e != nil {
+    if e := json.Unmarshal(buffer, &cm); e != nil {
         return nil, e
     }
     sort.Sort(cm.Chunks)
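
Note on the json.Unmarshal fix above: encoding/json refuses a non-pointer destination, so the old call always returned an InvalidUnmarshalError and LoadChunkedManifest could never produce a usable manifest; passing &cm lets the decoder fill the struct. Dropping omitempty from the ChunkInfo tags additionally keeps zero values such as offset 0 explicit when the manifest is marshaled. A minimal standalone sketch of the pointer issue; the ChunkInfo fields match this file, but the ChunkManifest tags and the sample fid/JSON are assumptions:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // Trimmed-down copies of the types in chunked_file.go (tags on ChunkManifest assumed).
    type ChunkInfo struct {
        Fid    string `json:"fid"`
        Offset int64  `json:"offset"`
        Size   int64  `json:"size"`
    }

    type ChunkManifest struct {
        Name   string       `json:"name"`
        Mime   string       `json:"mime"`
        Size   int64        `json:"size"`
        Chunks []*ChunkInfo `json:"chunks"`
    }

    func main() {
        buffer := []byte(`{"name":"big.bin","size":3,"chunks":[{"fid":"3,01637037d6","offset":0,"size":3}]}`)

        cm := ChunkManifest{}
        // Old call: a struct value is passed, so Unmarshal cannot write into it
        // and returns "json: Unmarshal(non-pointer main.ChunkManifest)".
        fmt.Println(json.Unmarshal(buffer, cm))

        // Fixed call: a pointer lets the manifest actually be populated.
        fmt.Println(json.Unmarshal(buffer, &cm), cm.Name, len(cm.Chunks)) // <nil> big.bin 1
    }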

go/operation/submit.go (7 lines changed)

@@ -116,11 +116,12 @@ func (fi FilePart) Upload(maxMB int, master string, secret security.Secret) (ret
     if closer, ok := fi.Reader.(io.Closer); ok {
         defer closer.Close()
     }
+    baseName := path.Base(fi.FileName)
     if maxMB > 0 && fi.FileSize > int64(maxMB*1024*1024) {
         chunkSize := int64(maxMB * 1024 * 1024)
         chunks := fi.FileSize/chunkSize + 1
         cm := ChunkManifest{
-            Name:   fi.FileName,
+            Name:   baseName,
             Size:   fi.FileSize,
             Mime:   fi.MimeType,
             Chunks: make([]*ChunkInfo, 0, chunks),

@@ -128,7 +129,7 @@ func (fi FilePart) Upload(maxMB int, master string, secret security.Secret) (ret
         for i := int64(0); i < chunks; i++ {
             id, count, e := upload_one_chunk(
-                fi.FileName+"-"+strconv.FormatInt(i+1, 10),
+                baseName+"-"+strconv.FormatInt(i+1, 10),
                 io.LimitReader(fi.Reader, chunkSize),
                 master, fi.Replication, fi.Collection, fi.Ttl,
                 jwt)

@@ -152,7 +153,7 @@ func (fi FilePart) Upload(maxMB int, master string, secret security.Secret) (ret
             cm.DeleteChunks(master)
         }
     } else {
-        ret, e := Upload(fileUrl, fi.FileName, fi.Reader, fi.IsGzipped, fi.MimeType, jwt)
+        ret, e := Upload(fileUrl, baseName, fi.Reader, fi.IsGzipped, fi.MimeType, jwt)
         if e != nil {
             return 0, e
         }
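
The submit.go change matters because callers often set FilePart.FileName to a full client-side path; storing it verbatim leaks that path into the manifest name and into every chunk's name. path.Base keeps only the last path element. A small sketch of the before/after naming, reusing the chunk-name pattern from the loop above; the file path is a made-up example:

    package main

    import (
        "fmt"
        "path"
        "strconv"
    )

    func main() {
        fileName := "/home/alice/videos/holiday.mp4" // hypothetical FilePart.FileName
        baseName := path.Base(fileName)              // "holiday.mp4"

        for i := int64(0); i < 3; i++ {
            oldName := fileName + "-" + strconv.FormatInt(i+1, 10) // before: full path in each chunk name
            newName := baseName + "-" + strconv.FormatInt(i+1, 10) // after: base name only
            fmt.Println(oldName, "->", newName)
        }
    }

Since this is path.Base rather than filepath.Base, a Windows-style backslash path would still pass through untrimmed; that matches what the diff itself calls.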

go/weed/weed_server/volume_server_handlers_read.go (16 lines changed)

@@ -225,18 +225,22 @@ func (vs *VolumeServer) tryHandleChunkedFile(n *storage.Needle, fileName string,
     if !n.IsChunkedManifest() {
         return false
     }
+    processed = true
     raw, _ := strconv.ParseBool(r.FormValue("raw"))
     if raw {
-        w.Header().Set("Content-Type", "application/json")
-        w.Header().Set("Content-Length", strconv.Itoa(len(n.Data)))
-        if _, e := w.Write(n.Data); e != nil {
-            glog.V(0).Infoln("response write error:", e)
-            return false
-        }
+        processed = true
+        if n.IsGzipped(){
+            var err error
+            if n.Data, err = storage.UnGzipData(n.Data); err != nil {
+                glog.V(0).Infoln("ungzip data error:", err, r.URL.Path)
+                return false
+            }
         }
+        return true
     }
     chunkManifest, e := operation.LoadChunkedManifest(n.Data)
     if e != nil {
+        glog.V(0).Infoln("load chunked manifest error:", e)
         return false
     }
     ext := ""
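
The volume-server change reflects that a needle may hold the chunk manifest gzip-compressed, so the bytes must be decompressed before they are JSON-decoded; feeding gzip bytes straight into LoadChunkedManifest is one way the chunked download breaks. A self-contained sketch of that order of operations using only the standard library; the unGzip helper merely stands in for the storage.UnGzipData call seen in the diff, and the manifest JSON and fid are invented:

    package main

    import (
        "bytes"
        "compress/gzip"
        "encoding/json"
        "fmt"
        "log"
    )

    type ChunkInfo struct {
        Fid    string `json:"fid"`
        Offset int64  `json:"offset"`
        Size   int64  `json:"size"`
    }

    type ChunkManifest struct {
        Name   string       `json:"name"`
        Chunks []*ChunkInfo `json:"chunks"`
    }

    // unGzip plays the role of a helper like storage.UnGzipData: inflate a gzip blob.
    func unGzip(data []byte) ([]byte, error) {
        zr, err := gzip.NewReader(bytes.NewReader(data))
        if err != nil {
            return nil, err
        }
        defer zr.Close()
        var out bytes.Buffer
        if _, err := out.ReadFrom(zr); err != nil {
            return nil, err
        }
        return out.Bytes(), nil
    }

    func main() {
        // Stand-in for n.Data read from a gzipped needle: a gzipped manifest.
        var stored bytes.Buffer
        zw := gzip.NewWriter(&stored)
        zw.Write([]byte(`{"name":"big.bin","chunks":[{"fid":"3,01637037d6","offset":0,"size":3}]}`))
        zw.Close()
        data := stored.Bytes()

        // Decoding without decompressing, as the old handler effectively did, fails:
        // gzip bytes are not valid JSON.
        fmt.Println(json.Unmarshal(data, &ChunkManifest{}) != nil) // true

        // Decompress first, as the fixed handler does, then decode the manifest.
        plain, err := unGzip(data)
        if err != nil {
            log.Fatal(err)
        }
        cm := ChunkManifest{}
        if err := json.Unmarshal(plain, &cm); err != nil {
            log.Fatal(err)
        }
        fmt.Println(cm.Name, len(cm.Chunks)) // big.bin 1
    }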
