From 99ecf63276b3b33351f316c4d3ea7269d591a3cb Mon Sep 17 00:00:00 2001 From: Chris Lu Date: Sat, 29 Aug 2020 22:28:33 -0700 Subject: [PATCH] go fmt --- .../filer2/abstract_sql/abstract_sql_store.go | 2 +- weed/filer2/filechunk_manifest.go | 1 - weed/filer2/filechunks.go | 2 +- weed/filer2/filechunks2_test.go | 24 +++++----- weed/filer2/filechunks_test.go | 8 ++-- weed/filer2/filer.go | 2 +- weed/filer2/reader_at.go | 2 +- weed/filer2/reader_at_test.go | 48 +++++++++---------- weed/filesys/dirty_page_interval_test.go | 6 +-- weed/filesys/fscache_test.go | 2 - weed/s3api/s3api_objects_list_handlers.go | 24 +++++----- .../filer_server_handlers_write_autochunk.go | 1 - weed/storage/needle/needle.go | 2 +- weed/storage/needle_map/compact_map.go | 4 +- weed/storage/needle_map/needle_value.go | 2 +- weed/storage/types/needle_types.go | 2 +- 16 files changed, 64 insertions(+), 68 deletions(-) diff --git a/weed/filer2/abstract_sql/abstract_sql_store.go b/weed/filer2/abstract_sql/abstract_sql_store.go index 957e40629..3dd4af103 100644 --- a/weed/filer2/abstract_sql/abstract_sql_store.go +++ b/weed/filer2/abstract_sql/abstract_sql_store.go @@ -73,7 +73,7 @@ func (store *AbstractSqlStore) InsertEntry(ctx context.Context, entry *filer2.En } affectedRows, err := res.RowsAffected() - if err == nil && affectedRows > 0{ + if err == nil && affectedRows > 0 { return nil } diff --git a/weed/filer2/filechunk_manifest.go b/weed/filer2/filechunk_manifest.go index bde4ddf27..e8cf564e3 100644 --- a/weed/filer2/filechunk_manifest.go +++ b/weed/filer2/filechunk_manifest.go @@ -37,7 +37,6 @@ func SeparateManifestChunks(chunks []*filer_pb.FileChunk) (manifestChunks, nonMa return } - func ResolveChunkManifest(lookupFileIdFn LookupFileIdFunctionType, chunks []*filer_pb.FileChunk) (dataChunks, manifestChunks []*filer_pb.FileChunk, manefestResolveErr error) { // TODO maybe parallel this for _, chunk := range chunks { diff --git a/weed/filer2/filechunks.go b/weed/filer2/filechunks.go index 764ba6060..53c679d6b 100644 --- a/weed/filer2/filechunks.go +++ b/weed/filer2/filechunks.go @@ -160,7 +160,7 @@ func logPrintf(name string, visibles []VisibleInterval) { for _, v := range visibles { glog.V(0).Infof("%s: [%d,%d) %s %d", name, v.start, v.stop, v.fileId, v.chunkOffset) } - */ + */ } var bufPool = sync.Pool{ diff --git a/weed/filer2/filechunks2_test.go b/weed/filer2/filechunks2_test.go index 0bdbdac28..d896da3cc 100644 --- a/weed/filer2/filechunks2_test.go +++ b/weed/filer2/filechunks2_test.go @@ -11,17 +11,17 @@ import ( func TestCompactFileChunksRealCase(t *testing.T) { chunks := []*filer_pb.FileChunk{ - {FileId:"2,512f31f2c0700a", Offset: 0, Size: 25- 0, Mtime: 5320497}, - {FileId:"6,512f2c2e24e9e8", Offset: 868352, Size: 917585- 868352, Mtime: 5320492}, - {FileId:"7,514468dd5954ca", Offset: 884736, Size: 901120- 884736, Mtime: 5325928}, - {FileId:"5,5144463173fe77", Offset: 917504, Size: 2297856- 917504, Mtime: 5325894}, - {FileId:"4,51444c7ab54e2d", Offset: 2301952, Size: 2367488-2301952, Mtime: 5325900}, - {FileId:"4,514450e643ad22", Offset: 2371584, Size: 2420736-2371584, Mtime: 5325904}, - {FileId:"6,514456a5e9e4d7", Offset: 2449408, Size: 2490368-2449408, Mtime: 5325910}, - {FileId:"3,51444f8d53eebe", Offset: 2494464, Size: 2555904-2494464, Mtime: 5325903}, - {FileId:"4,5144578b097c7e", Offset: 2560000, Size: 2596864-2560000, Mtime: 5325911}, - {FileId:"3,51445500b6b4ac", Offset: 2637824, Size: 2678784-2637824, Mtime: 5325909}, - {FileId:"1,51446285e52a61", Offset: 2695168, Size: 2715648-2695168, Mtime: 5325922}, + 
{FileId: "2,512f31f2c0700a", Offset: 0, Size: 25 - 0, Mtime: 5320497}, + {FileId: "6,512f2c2e24e9e8", Offset: 868352, Size: 917585 - 868352, Mtime: 5320492}, + {FileId: "7,514468dd5954ca", Offset: 884736, Size: 901120 - 884736, Mtime: 5325928}, + {FileId: "5,5144463173fe77", Offset: 917504, Size: 2297856 - 917504, Mtime: 5325894}, + {FileId: "4,51444c7ab54e2d", Offset: 2301952, Size: 2367488 - 2301952, Mtime: 5325900}, + {FileId: "4,514450e643ad22", Offset: 2371584, Size: 2420736 - 2371584, Mtime: 5325904}, + {FileId: "6,514456a5e9e4d7", Offset: 2449408, Size: 2490368 - 2449408, Mtime: 5325910}, + {FileId: "3,51444f8d53eebe", Offset: 2494464, Size: 2555904 - 2494464, Mtime: 5325903}, + {FileId: "4,5144578b097c7e", Offset: 2560000, Size: 2596864 - 2560000, Mtime: 5325911}, + {FileId: "3,51445500b6b4ac", Offset: 2637824, Size: 2678784 - 2637824, Mtime: 5325909}, + {FileId: "1,51446285e52a61", Offset: 2695168, Size: 2715648 - 2695168, Mtime: 5325922}, } printChunks("before", chunks) @@ -43,4 +43,4 @@ func printChunks(name string, chunks []*filer_pb.FileChunk) { for _, chunk := range chunks { glog.V(0).Infof("%s chunk %s [%10d,%10d)", name, chunk.GetFileIdString(), chunk.Offset, chunk.Offset+int64(chunk.Size)) } -} \ No newline at end of file +} diff --git a/weed/filer2/filechunks_test.go b/weed/filer2/filechunks_test.go index 7033cb45c..31b74a22a 100644 --- a/weed/filer2/filechunks_test.go +++ b/weed/filer2/filechunks_test.go @@ -74,7 +74,7 @@ func TestRandomFileChunksCompact(t *testing.T) { if start > stop { start, stop = stop, start } - if start + 16 < stop { + if start+16 < stop { stop = start + 16 } chunk := &filer_pb.FileChunk{ @@ -352,9 +352,9 @@ func TestChunksReading(t *testing.T) { // case 6: same updates { Chunks: []*filer_pb.FileChunk{ - {Offset: 0, Size: 100, FileId: "abc", Fid: &filer_pb.FileId{FileKey: 1}, Mtime: 123}, - {Offset: 0, Size: 100, FileId: "def", Fid: &filer_pb.FileId{FileKey: 2}, Mtime: 123}, - {Offset: 0, Size: 100, FileId: "xyz", Fid: &filer_pb.FileId{FileKey: 3}, Mtime: 123}, + {Offset: 0, Size: 100, FileId: "abc", Fid: &filer_pb.FileId{FileKey: 1}, Mtime: 123}, + {Offset: 0, Size: 100, FileId: "def", Fid: &filer_pb.FileId{FileKey: 2}, Mtime: 123}, + {Offset: 0, Size: 100, FileId: "xyz", Fid: &filer_pb.FileId{FileKey: 3}, Mtime: 123}, }, Offset: 0, Size: 100, diff --git a/weed/filer2/filer.go b/weed/filer2/filer.go index 4c3caec7e..a3b7709ad 100644 --- a/weed/filer2/filer.go +++ b/weed/filer2/filer.go @@ -18,7 +18,7 @@ import ( const ( LogFlushInterval = time.Minute - PaginationSize = 1024 * 256 + PaginationSize = 1024 * 256 ) var ( diff --git a/weed/filer2/reader_at.go b/weed/filer2/reader_at.go index 0bf528a42..c22f20379 100644 --- a/weed/filer2/reader_at.go +++ b/weed/filer2/reader_at.go @@ -109,7 +109,7 @@ func (c *ChunkReadAt) doReadAt(p []byte, offset int64) (n int, err error) { glog.V(4).Infof("doReadAt [%d,%d), n:%v, err:%v", offset, offset+int64(len(p)), n, err) if err == nil && remaining > 0 && c.fileSize > startOffset { - delta := int(min(remaining, c.fileSize - startOffset)) + delta := int(min(remaining, c.fileSize-startOffset)) glog.V(4).Infof("zero2 [%d,%d) of file size %d bytes", startOffset, startOffset+int64(delta), c.fileSize) n += delta } diff --git a/weed/filer2/reader_at_test.go b/weed/filer2/reader_at_test.go index 7377c5dbc..7bfc9a972 100644 --- a/weed/filer2/reader_at_test.go +++ b/weed/filer2/reader_at_test.go @@ -27,33 +27,33 @@ func TestReaderAt(t *testing.T) { visibles := []VisibleInterval{ { - start: 1, - stop: 2, - fileId: "1", + 
start: 1, + stop: 2, + fileId: "1", chunkSize: 9, }, { - start: 3, - stop: 4, - fileId: "3", + start: 3, + stop: 4, + fileId: "3", chunkSize: 1, }, { - start: 5, - stop: 6, - fileId: "5", + start: 5, + stop: 6, + fileId: "5", chunkSize: 2, }, { - start: 7, - stop: 9, - fileId: "7", + start: 7, + stop: 9, + fileId: "7", chunkSize: 2, }, { - start: 9, - stop: 10, - fileId: "9", + start: 9, + stop: 10, + fileId: "9", chunkSize: 2, }, } @@ -95,15 +95,15 @@ func TestReaderAt0(t *testing.T) { visibles := []VisibleInterval{ { - start: 2, - stop: 5, - fileId: "1", + start: 2, + stop: 5, + fileId: "1", chunkSize: 9, }, { - start: 7, - stop: 9, - fileId: "2", + start: 7, + stop: 9, + fileId: "2", chunkSize: 9, }, } @@ -129,9 +129,9 @@ func TestReaderAt1(t *testing.T) { visibles := []VisibleInterval{ { - start: 2, - stop: 5, - fileId: "1", + start: 2, + stop: 5, + fileId: "1", chunkSize: 9, }, } diff --git a/weed/filesys/dirty_page_interval_test.go b/weed/filesys/dirty_page_interval_test.go index 57da01bc3..d02ad27fd 100644 --- a/weed/filesys/dirty_page_interval_test.go +++ b/weed/filesys/dirty_page_interval_test.go @@ -73,14 +73,14 @@ func TestRandomWrites(t *testing.T) { data := make([]byte, 1024) - for i:=0;i<1024;i++ { + for i := 0; i < 1024; i++ { start, stop := rand.Intn(len(data)), rand.Intn(len(data)) if start > stop { - start,stop = stop, start + start, stop = stop, start } - rand.Read(data[start:stop+1]) + rand.Read(data[start : stop+1]) c.AddInterval(data[start:stop+1], int64(start)) diff --git a/weed/filesys/fscache_test.go b/weed/filesys/fscache_test.go index 8bfae1472..1152eb32e 100644 --- a/weed/filesys/fscache_test.go +++ b/weed/filesys/fscache_test.go @@ -95,7 +95,6 @@ func TestFsCacheMove(t *testing.T) { } - func TestFsCacheMove2(t *testing.T) { cache := newFsCache(nil) @@ -114,4 +113,3 @@ func TestFsCacheMove2(t *testing.T) { } } - diff --git a/weed/s3api/s3api_objects_list_handlers.go b/weed/s3api/s3api_objects_list_handlers.go index 3354dd2b3..254a99275 100644 --- a/weed/s3api/s3api_objects_list_handlers.go +++ b/weed/s3api/s3api_objects_list_handlers.go @@ -17,18 +17,18 @@ import ( ) type ListBucketResultV2 struct { - XMLName xml.Name `xml:"http://s3.amazonaws.com/doc/2006-03-01/ ListBucketResult"` - Name string `xml:"Name"` - Prefix string `xml:"Prefix"` - MaxKeys int `xml:"MaxKeys"` - Delimiter string `xml:"Delimiter,omitempty"` - IsTruncated bool `xml:"IsTruncated"` - Contents []ListEntry `xml:"Contents,omitempty"` - CommonPrefixes []PrefixEntry `xml:"CommonPrefixes,omitempty"` - ContinuationToken string `xml:"ContinuationToken,omitempty"` - NextContinuationToken string `xml:"NextContinuationToken,omitempty"` - KeyCount int `xml:"KeyCount"` - StartAfter string `xml:"StartAfter,omitempty"` + XMLName xml.Name `xml:"http://s3.amazonaws.com/doc/2006-03-01/ ListBucketResult"` + Name string `xml:"Name"` + Prefix string `xml:"Prefix"` + MaxKeys int `xml:"MaxKeys"` + Delimiter string `xml:"Delimiter,omitempty"` + IsTruncated bool `xml:"IsTruncated"` + Contents []ListEntry `xml:"Contents,omitempty"` + CommonPrefixes []PrefixEntry `xml:"CommonPrefixes,omitempty"` + ContinuationToken string `xml:"ContinuationToken,omitempty"` + NextContinuationToken string `xml:"NextContinuationToken,omitempty"` + KeyCount int `xml:"KeyCount"` + StartAfter string `xml:"StartAfter,omitempty"` } func (s3a *S3ApiServer) ListObjectsV2Handler(w http.ResponseWriter, r *http.Request) { diff --git a/weed/server/filer_server_handlers_write_autochunk.go b/weed/server/filer_server_handlers_write_autochunk.go index 
266970618..1d037f85f 100644 --- a/weed/server/filer_server_handlers_write_autochunk.go +++ b/weed/server/filer_server_handlers_write_autochunk.go @@ -148,7 +148,6 @@ func (fs *FilerServer) saveMetaData(ctx context.Context, r *http.Request, fileNa crTime = existingEntry.Crtime } - glog.V(4).Infoln("saving", path) entry := &filer2.Entry{ FullPath: util.FullPath(path), diff --git a/weed/storage/needle/needle.go b/weed/storage/needle/needle.go index 0d962886b..34d29ab6e 100644 --- a/weed/storage/needle/needle.go +++ b/weed/storage/needle/needle.go @@ -24,7 +24,7 @@ const ( type Needle struct { Cookie Cookie `comment:"random number to mitigate brute force lookups"` Id NeedleId `comment:"needle id"` - Size Size `comment:"sum of DataSize,Data,NameSize,Name,MimeSize,Mime"` + Size Size `comment:"sum of DataSize,Data,NameSize,Name,MimeSize,Mime"` DataSize uint32 `comment:"Data size"` //version2 Data []byte `comment:"The actual file data"` diff --git a/weed/storage/needle_map/compact_map.go b/weed/storage/needle_map/compact_map.go index 81ff27c45..2b1a471bc 100644 --- a/weed/storage/needle_map/compact_map.go +++ b/weed/storage/needle_map/compact_map.go @@ -18,7 +18,7 @@ const SectionalNeedleIdLimit = 1<<32 - 1 type SectionalNeedleValue struct { Key SectionalNeedleId OffsetLower OffsetLower `comment:"Volume offset"` //since aligned to 8 bytes, range is 4G*8=32G - Size Size `comment:"Size of the data portion"` + Size Size `comment:"Size of the data portion"` } type SectionalNeedleValueExtra struct { @@ -116,7 +116,7 @@ func (cs *CompactSection) deleteOverflowEntry(key SectionalNeedleId) { }) if deleteCandidate != length && cs.overflow[deleteCandidate].Key == key { if cs.overflow[deleteCandidate].Size.IsValid() { - cs.overflow[deleteCandidate].Size = - cs.overflow[deleteCandidate].Size + cs.overflow[deleteCandidate].Size = -cs.overflow[deleteCandidate].Size } } } diff --git a/weed/storage/needle_map/needle_value.go b/weed/storage/needle_map/needle_value.go index f4687cb79..f8d614660 100644 --- a/weed/storage/needle_map/needle_value.go +++ b/weed/storage/needle_map/needle_value.go @@ -9,7 +9,7 @@ import ( type NeedleValue struct { Key NeedleId Offset Offset `comment:"Volume offset"` //since aligned to 8 bytes, range is 4G*8=32G - Size Size `comment:"Size of the data portion"` + Size Size `comment:"Size of the data portion"` } func (this NeedleValue) Less(than btree.Item) bool { diff --git a/weed/storage/types/needle_types.go b/weed/storage/types/needle_types.go index 7e30d2bd8..137b97d7f 100644 --- a/weed/storage/types/needle_types.go +++ b/weed/storage/types/needle_types.go @@ -18,7 +18,7 @@ func (s Size) IsDeleted() bool { return s < 0 || s == TombstoneFileSize } func (s Size) IsValid() bool { - return s >0 && s != TombstoneFileSize + return s > 0 && s != TombstoneFileSize } type OffsetLower struct {
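
Note on reproducing a pass like the one above: these hunks are the kind of whitespace-only rewrites emitted by Go's standard formatter. A minimal sketch, assuming a checkout of the repository root; the exact paths below are illustrative and not part of the patch:

    # list files whose formatting differs from gofmt's output, without changing them
    gofmt -l ./weed/
    # preview the rewrite for a single file as a unified diff
    gofmt -d ./weed/filer2/filechunks.go
    # rewrite files in place, then re-run the tests touched by this patch
    gofmt -w ./weed/
    go test ./weed/filer2/... ./weed/filesys/...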