
filer sink retries reading file chunks, skipping missing chunks

if a file chunk is not available at replication time, the file is skipped
pull/4077/head
chrislu committed 2 years ago (commit 6c7fe40305)
  1. weed/replication/sink/filersink/fetch_write.go (6 changes)
  2. weed/replication/sink/filersink/filer_sink.go (8 changes)
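
The net behavior: each chunk copy is retried, and if a chunk still cannot be read (for example because the source chunk has already been deleted), the file is logged and skipped instead of failing the whole replication pass. Below is a minimal Go sketch of that retry-then-skip pattern; the retry helper and replicateChunk stand-in are hypothetical, not the real util.Retry and replicateOneChunk from this commit.

// Sketch only: the retry policy and replicateChunk helper are illustrative, not SeaweedFS code.
package main

import (
	"errors"
	"log"
	"time"
)

// retry runs job a few times with a short pause, returning the last error.
func retry(name string, job func() error) (err error) {
	for attempt := 1; attempt <= 3; attempt++ {
		if err = job(); err == nil {
			return nil
		}
		log.Printf("%s: attempt %d failed: %v", name, attempt, err)
		time.Sleep(100 * time.Millisecond)
	}
	return err
}

// replicateChunk stands in for copying one chunk from the source filer to the sink.
func replicateChunk(chunkID string) error {
	return errors.New("chunk not found") // e.g. the source chunk was already deleted
}

// replicateFile retries each chunk; if a chunk is still unreadable, the file is skipped.
func replicateFile(chunkIDs []string) {
	for _, id := range chunkIDs {
		chunkID := id
		if err := retry("replicate chunk "+chunkID, func() error { return replicateChunk(chunkID) }); err != nil {
			log.Printf("skipping file, chunk %s unavailable: %v", chunkID, err)
			return
		}
	}
	log.Println("file replicated")
}

func main() {
	replicateFile([]string{"3,01637037d6"})
}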

weed/replication/sink/filersink/fetch_write.go (6 changes)

@@ -27,12 +27,16 @@ func (fs *FilerSink) replicateChunks(sourceChunks []*filer_pb.FileChunk, path st
 		index, source := chunkIndex, sourceChunk
 		fs.executor.Execute(func() {
 			defer wg.Done()
+			util.Retry("replicate chunks", func() error {
 				replicatedChunk, e := fs.replicateOneChunk(source, path)
 				if e != nil {
 					err = e
-					return
+					return e
 				}
 				replicatedChunks[index] = replicatedChunk
+				err = nil
+				return nil
+			})
 		})
 	}
 	wg.Wait()

weed/replication/sink/filersink/filer_sink.go (8 changes)

@@ -112,7 +112,7 @@ func (fs *FilerSink) CreateEntry(key string, entry *filer_pb.Entry, signatures [
 			Directory: dir,
 			Name:      name,
 		}
-		glog.V(1).Infof("lookup: %v", lookupRequest)
+		// glog.V(1).Infof("lookup: %v", lookupRequest)
 		if resp, err := filer_pb.LookupEntry(client, lookupRequest); err == nil {
 			if filer.ETag(resp.Entry) == filer.ETag(entry) {
 				glog.V(3).Infof("already replicated %s", key)
@@ -125,9 +125,10 @@ func (fs *FilerSink) CreateEntry(key string, entry *filer_pb.Entry, signatures [
 		if err != nil {
 			// only warning here since the source chunk may have been deleted already
 			glog.Warningf("replicate entry chunks %s: %v", key, err)
+			return nil
 		}
-		glog.V(4).Infof("replicated %s %+v ===> %+v", key, entry.GetChunks(), replicatedChunks)
+		// glog.V(4).Infof("replicated %s %+v ===> %+v", key, entry.GetChunks(), replicatedChunks)
 		request := &filer_pb.CreateEntryRequest{
 			Directory: dir,
@@ -205,7 +206,8 @@ func (fs *FilerSink) UpdateEntry(key string, oldEntry *filer_pb.Entry, newParent
 		// replicate the chunks that are new in the source
 		replicatedChunks, err := fs.replicateChunks(newChunks, key)
 		if err != nil {
-			return true, fmt.Errorf("replicate %s chunks error: %v", key, err)
+			glog.Warningf("replicate entry chunks %s: %v", key, err)
+			return true, nil
 		}
 		existingEntry.Chunks = append(existingEntry.GetChunks(), replicatedChunks...)
 		existingEntry.Attributes = newEntry.Attributes
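
Taken together, CreateEntry and UpdateEntry now treat a failed chunk replication as a per-file skip: they log a warning and report success to the caller, so a source chunk that has already been deleted no longer makes the whole replication fail or get re-queued. A compact sketch of that error-handling choice, using hypothetical names rather than the real filer sink API:

// Sketch only: the types and helpers here are hypothetical, not the real filer sink code.
package main

import (
	"errors"
	"log"
)

type entry struct {
	key    string
	chunks []string
}

// replicateChunks stands in for fs.replicateChunks and fails when a source chunk is gone.
func replicateChunks(chunks []string) ([]string, error) {
	return nil, errors.New("read chunk " + chunks[0] + ": not found")
}

// createEntry mirrors the new behavior: on a chunk error it warns and reports success,
// so this file is skipped instead of failing the whole replication.
func createEntry(e entry) error {
	replicatedChunks, err := replicateChunks(e.chunks)
	if err != nil {
		// only a warning, since the source chunk may have been deleted already
		log.Printf("replicate entry chunks %s: %v", e.key, err)
		return nil
	}
	log.Printf("replicated %s with %d chunks", e.key, len(replicatedChunks))
	return nil
}

func main() {
	_ = createEntry(entry{key: "/buckets/demo/a.txt", chunks: []string{"3,01637037d6"}})
}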
