From ffb5d3f93a69d36b13bd6ee0b52aa595521774ee Mon Sep 17 00:00:00 2001
From: Chris Lu
Date: Tue, 1 Jan 2019 02:14:40 -0800
Subject: [PATCH] get bytes from sync pool

---
 weed/filesys/dirty_page.go | 24 +++++++++++++-----------
 1 file changed, 13 insertions(+), 11 deletions(-)

diff --git a/weed/filesys/dirty_page.go b/weed/filesys/dirty_page.go
index 871905fd5..c69851071 100644
--- a/weed/filesys/dirty_page.go
+++ b/weed/filesys/dirty_page.go
@@ -4,6 +4,7 @@ import (
 	"bytes"
 	"context"
 	"fmt"
+	"sync/atomic"
 	"time"
 
 	"github.com/chrislusf/seaweedfs/weed/glog"
@@ -21,12 +22,6 @@ type ContinuousDirtyPages struct {
 	lock sync.Mutex
 }
 
-var bufPool = sync.Pool{
-	New: func() interface{} {
-		return new(bytes.Buffer)
-	},
-}
-
 func newDirtyPages(file *File) *ContinuousDirtyPages {
 	return &ContinuousDirtyPages{
 		Data: nil,
@@ -37,9 +32,14 @@ func newDirtyPages(file *File) *ContinuousDirtyPages {
 func (pages *ContinuousDirtyPages) releaseResource() {
 	if pages.Data != nil {
 		pages.f.wfs.bufPool.Put(pages.Data)
+		pages.Data = nil
+		atomic.AddInt32(&counter, -1)
+		glog.V(3).Infof("%s/%s releasing resource %d", pages.f.dir.Path, pages.f.Name, counter)
 	}
 }
 
+var counter = int32(0)
+
 func (pages *ContinuousDirtyPages) AddPage(ctx context.Context, offset int64, data []byte) (chunks []*filer_pb.FileChunk, err error) {
 
 	pages.lock.Lock()
@@ -47,15 +47,17 @@ func (pages *ContinuousDirtyPages) AddPage(ctx context.Context, offset int64, da
 
 	var chunk *filer_pb.FileChunk
 
-	if pages.Data == nil {
-		pages.Data = pages.f.wfs.bufPool.Get().([]byte)
-	}
-
-	if len(data) > len(pages.Data) {
+	if len(data) > int(pages.f.wfs.option.ChunkSizeLimit) {
 		// this is more than what buffer can hold.
 		return pages.flushAndSave(ctx, offset, data)
 	}
 
+	if pages.Data == nil {
+		pages.Data = pages.f.wfs.bufPool.Get().([]byte)
+		atomic.AddInt32(&counter, 1)
+		glog.V(3).Infof("%s/%s acquire resource %d", pages.f.dir.Path, pages.f.Name, counter)
+	}
+
 	if offset < pages.Offset || offset >= pages.Offset+int64(len(pages.Data)) ||
 		pages.Offset+int64(len(pages.Data)) < offset+int64(len(data)) {
 		// if the data is out of range,
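
The patch dereferences pages.f.wfs.bufPool and pages.f.wfs.option.ChunkSizeLimit, but their definitions are outside this diff. Below is a minimal, self-contained sketch, assuming the pool hands out []byte buffers sized to ChunkSizeLimit; the WFS and Option shapes, the helper names, and the plain log calls are illustrative stand-ins, not SeaweedFS's actual code.

// Sketch of a wfs-level buffer pool with an acquire/release counter.
// Assumption: the pool returns []byte slices sized to ChunkSizeLimit.
package main

import (
	"log"
	"sync"
	"sync/atomic"
)

type Option struct {
	ChunkSizeLimit int64 // largest write the dirty-page buffer will hold
}

type WFS struct {
	option  *Option
	bufPool sync.Pool
}

// counter tracks how many buffers are currently checked out, mirroring the
// package-level counter the patch adds for V(3) debug logging.
var counter int32

func NewWFS(chunkSizeLimit int64) *WFS {
	wfs := &WFS{option: &Option{ChunkSizeLimit: chunkSizeLimit}}
	wfs.bufPool.New = func() interface{} {
		// Allocate a full-size buffer once; Get/Put recycle it afterwards.
		return make([]byte, chunkSizeLimit)
	}
	return wfs
}

// acquireBuffer checks a buffer out of the pool and bumps the counter.
func (wfs *WFS) acquireBuffer(name string) []byte {
	buf := wfs.bufPool.Get().([]byte)
	log.Printf("%s acquire resource %d", name, atomic.AddInt32(&counter, 1))
	return buf
}

// releaseBuffer returns a buffer to the pool and decrements the counter.
func (wfs *WFS) releaseBuffer(name string, buf []byte) {
	if buf == nil {
		return
	}
	wfs.bufPool.Put(buf)
	log.Printf("%s releasing resource %d", name, atomic.AddInt32(&counter, -1))
}

func main() {
	wfs := NewWFS(4 * 1024 * 1024) // e.g. a 4MB chunk size limit

	data := []byte("hello world")
	if int64(len(data)) > wfs.option.ChunkSizeLimit {
		// Mirrors the patch: an oversized write bypasses the buffer entirely
		// (flushAndSave in the real code).
		log.Println("write larger than buffer, flush and save directly")
		return
	}

	buf := wfs.acquireBuffer("/some/dir/somefile") // acquired only after the size check
	copy(buf, data)
	wfs.releaseBuffer("/some/dir/somefile", buf)
}

With a fixed-size pool like this, the oversized-write check has to happen before Get(), which is why the patch moves the buffer acquisition below the size comparison and compares len(data) against ChunkSizeLimit instead of len(pages.Data).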