Browse Source

merge chunks during upload (#4130)

* merge chunks during upload

* fix test
pull/4652/head
Chris Lu 2 years ago
committed by GitHub
parent
commit
1cd2e64aac
No known key found for this signature in database GPG Key ID: 4AEE18F83AFDEB23
  1. 10
      weed/filer/filechunk_section_test.go
  2. 14
      weed/mount/page_writer/page_chunk_mem.go
  3. 15
      weed/mount/page_writer/page_chunk_swapfile.go

10
weed/filer/filechunk_section_test.go

@@ -7,31 +7,31 @@ import (
func Test_removeGarbageChunks(t *testing.T) { func Test_removeGarbageChunks(t *testing.T) {
section := NewFileChunkSection(0) section := NewFileChunkSection(0)
section.addChunk(&filer_pb.FileChunk{
section.chunks = append(section.chunks, &filer_pb.FileChunk{
FileId: "0", FileId: "0",
Offset: 0, Offset: 0,
Size: 1, Size: 1,
ModifiedTsNs: 0, ModifiedTsNs: 0,
}) })
section.addChunk(&filer_pb.FileChunk{
section.chunks = append(section.chunks, &filer_pb.FileChunk{
FileId: "1", FileId: "1",
Offset: 1, Offset: 1,
Size: 1, Size: 1,
ModifiedTsNs: 1, ModifiedTsNs: 1,
}) })
section.addChunk(&filer_pb.FileChunk{
section.chunks = append(section.chunks, &filer_pb.FileChunk{
FileId: "2", FileId: "2",
Offset: 2, Offset: 2,
Size: 1, Size: 1,
ModifiedTsNs: 2, ModifiedTsNs: 2,
}) })
section.addChunk(&filer_pb.FileChunk{
section.chunks = append(section.chunks, &filer_pb.FileChunk{
FileId: "3", FileId: "3",
Offset: 3, Offset: 3,
Size: 1, Size: 1,
ModifiedTsNs: 3, ModifiedTsNs: 3,
}) })
section.addChunk(&filer_pb.FileChunk{
section.chunks = append(section.chunks, &filer_pb.FileChunk{
FileId: "4", FileId: "4",
Offset: 4, Offset: 4,
Size: 1, Size: 1,

14
weed/mount/page_writer/page_chunk_mem.go

@@ -105,4 +105,18 @@ func (mc *MemChunk) SaveContent(saveFn SaveToStorageFunc) {
saveFn(reader, int64(mc.logicChunkIndex)*mc.chunkSize+t.StartOffset, t.Size(), t.TsNs, func() { saveFn(reader, int64(mc.logicChunkIndex)*mc.chunkSize+t.StartOffset, t.Size(), t.TsNs, func() {
}) })
} }
for t := mc.usage.head.next; t != mc.usage.tail; t = t.next {
startOffset := t.StartOffset
stopOffset := t.stopOffset
tsNs := t.TsNs
for t != mc.usage.tail && t.next.StartOffset == stopOffset {
stopOffset = t.next.stopOffset
t = t.next
tsNs = max(tsNs, t.TsNs)
}
reader := util.NewBytesReader(mc.buf[startOffset:stopOffset])
saveFn(reader, int64(mc.logicChunkIndex)*mc.chunkSize+startOffset, stopOffset-startOffset, tsNs, func() {
})
}
} }

15
weed/mount/page_writer/page_chunk_swapfile.go

@@ -176,11 +176,20 @@ func (sc *SwapFileChunk) SaveContent(saveFn SaveToStorageFunc) {
} }
// println(sc.logicChunkIndex, "|", "save") // println(sc.logicChunkIndex, "|", "save")
for t := sc.usage.head.next; t != sc.usage.tail; t = t.next { for t := sc.usage.head.next; t != sc.usage.tail; t = t.next {
data := mem.Allocate(int(t.Size()))
n, _ := sc.swapfile.file.ReadAt(data, t.StartOffset+int64(sc.actualChunkIndex)*sc.swapfile.chunkSize)
startOffset := t.StartOffset
stopOffset := t.stopOffset
tsNs := t.TsNs
for t != sc.usage.tail && t.next.StartOffset == stopOffset {
stopOffset = t.next.stopOffset
t = t.next
tsNs = max(tsNs, t.TsNs)
}
data := mem.Allocate(int(stopOffset - startOffset))
n, _ := sc.swapfile.file.ReadAt(data, startOffset+int64(sc.actualChunkIndex)*sc.swapfile.chunkSize)
if n > 0 { if n > 0 {
reader := util.NewBytesReader(data[:n]) reader := util.NewBytesReader(data[:n])
saveFn(reader, int64(sc.logicChunkIndex)*sc.swapfile.chunkSize+t.StartOffset, int64(n), t.TsNs, func() {
saveFn(reader, int64(sc.logicChunkIndex)*sc.swapfile.chunkSize+startOffset, int64(n), tsNs, func() {
}) })
} }
mem.Free(data) mem.Free(data)

Loading…
Cancel
Save