to read files that are quickly changing
parent 575d7952a1
commit d75bc62196
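All three hunks below apply the same change: the chunk-read URL gains a readDeleted=true query parameter, so a read that races with a rapid overwrite can still fetch a needle that has just been marked deleted on the volume server. The commit appends the parameter by plain string concatenation; the following is a minimal sketch of an equivalent, slightly more defensive helper (hypothetical, not part of this commit) that also works when the URL already carries a query string:

	package main

	import (
		"fmt"
		"net/url"
	)

	// withReadDeleted is a hypothetical helper: it adds readDeleted=true to a
	// volume server read URL, preserving any query parameters already present.
	// The commit itself simply concatenates "?readDeleted=true".
	func withReadDeleted(rawUrl string) (string, error) {
		u, err := url.Parse(rawUrl)
		if err != nil {
			return "", err
		}
		q := u.Query()
		q.Set("readDeleted", "true")
		u.RawQuery = q.Encode()
		return u.String(), nil
	}

	func main() {
		// Example volume server URL and file id are made up for illustration.
		u, _ := withReadDeleted("http://127.0.0.1:8080/3,01637037d6")
		fmt.Println(u) // http://127.0.0.1:8080/3,01637037d6?readDeleted=true
	}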
@@ -101,7 +101,7 @@ func retriedFetchChunkData(urlStrings []string, cipherKey []byte, isGzipped bool
 	for waitTime := time.Second; waitTime < ReadWaitTime; waitTime += waitTime / 2 {
 		for _, urlString := range urlStrings {
-			shouldRetry, err = util.ReadUrlAsStream(urlString, cipherKey, isGzipped, isFullChunk, offset, size, func(data []byte) {
+			shouldRetry, err = util.ReadUrlAsStream(urlString+"?readDeleted=true", cipherKey, isGzipped, isFullChunk, offset, size, func(data []byte) {
 				buffer.Write(data)
 			})
 			if !shouldRetry {
@@ -181,7 +181,7 @@ func (c *ChunkStreamReader) fetchChunkToBuffer(chunkView *ChunkView) error {
 	var buffer bytes.Buffer
 	var shouldRetry bool
 	for _, urlString := range urlStrings {
-		shouldRetry, err = util.ReadUrlAsStream(urlString, chunkView.CipherKey, chunkView.IsGzipped, chunkView.IsFullChunk(), chunkView.Offset, int(chunkView.Size), func(data []byte) {
+		shouldRetry, err = util.ReadUrlAsStream(urlString+"?readDeleted=true", chunkView.CipherKey, chunkView.IsGzipped, chunkView.IsFullChunk(), chunkView.Offset, int(chunkView.Size), func(data []byte) {
 			buffer.Write(data)
 		})
 		if !shouldRetry {
@@ -163,7 +163,7 @@ func (s3sink *S3Sink) buildReadSeeker(chunk *filer.ChunkView) (io.ReadSeeker, er
 	}
 	buf := make([]byte, chunk.Size)
 	for _, fileUrl := range fileUrls {
-		_, err = util.ReadUrl(fileUrl, nil, false, false, chunk.Offset, int(chunk.Size), buf)
+		_, err = util.ReadUrl(fileUrl+"?readDeleted=true", nil, false, false, chunk.Offset, int(chunk.Size), buf)
 		if err != nil {
 			glog.V(1).Infof("read from %s: %v", fileUrl, err)
 		} else {
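On the receiving side, the volume server is expected to honor the flag by also serving needles that have been marked deleted; that behavior is implied by the commit message but not shown in this diff. For illustration only, a generic net/http handler would read such a flag as sketched here (hypothetical handler, not the actual seaweedfs volume server code):

	package main

	import (
		"fmt"
		"net/http"
	)

	// readHandler is a hypothetical sketch showing how a server can interpret
	// the readDeleted query parameter; the real seaweedfs read path is not
	// part of this commit.
	func readHandler(w http.ResponseWriter, r *http.Request) {
		readDeleted := r.URL.Query().Get("readDeleted") == "true"
		if readDeleted {
			// Assumption: serve the needle even if it was just marked deleted,
			// so readers racing with rapid overwrites still get data back.
		}
		fmt.Fprintf(w, "readDeleted=%v\n", readDeleted)
	}

	func main() {
		http.HandleFunc("/", readHandler)
		_ = http.ListenAndServe(":8080", nil)
	}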