skip error when draining reader

fix https://github.com/chrislusf/seaweedfs/issues/1179
commit 89e16bd2e8
parent 66ab09aa4a
Author: Chris Lu
Date:   2020-01-08 18:07:07 -08:00


@@ -11,6 +11,8 @@ import (
 	"net/http"
 	"net/url"
 	"strings"
+
+	"github.com/chrislusf/seaweedfs/weed/glog"
 )
 
 var (
@@ -210,7 +212,8 @@ func ReadUrl(fileUrl string, offset int64, size int, buf []byte, isReadRange bool
 	}
 
 	var reader io.ReadCloser
-	switch r.Header.Get("Content-Encoding") {
+	contentEncoding := r.Header.Get("Content-Encoding")
+	switch contentEncoding {
 	case "gzip":
 		reader, err = gzip.NewReader(r.Body)
 		defer reader.Close()
@@ -242,7 +245,7 @@ func ReadUrl(fileUrl string, offset int64, size int, buf []byte, isReadRange bool
 	// drains the response body to avoid memory leak
 	data, _ := ioutil.ReadAll(reader)
 	if len(data) != 0 {
-		err = fmt.Errorf("buffer size is too small. remains %d", len(data))
+		glog.V(1).Infof("%s reader has remaining %d bytes", contentEncoding, len(data))
 	}
 	return n, err
 }
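
For context, a minimal self-contained sketch of the pattern this commit lands: fill a caller-supplied buffer from the (possibly gzipped) response body, then drain whatever is left in the reader so the underlying HTTP connection can be reused, logging the leftover byte count instead of returning an error. The readAndDrain helper, the io.ReadFull call, and the standard log package (standing in for glog) are illustrative assumptions, not the actual ReadUrl implementation.

package main

import (
	"compress/gzip"
	"fmt"
	"io"
	"io/ioutil"
	"log"
	"net/http"
)

// readAndDrain is a hypothetical helper mirroring the diff above: it reads
// up to len(buf) bytes, then drains the remainder of the body, logging any
// leftover bytes rather than treating them as an error.
func readAndDrain(fileUrl string, buf []byte) (n int, err error) {
	r, err := http.Get(fileUrl)
	if err != nil {
		return 0, err
	}
	defer r.Body.Close()

	// Capture the header once so it is still available for the drain log.
	contentEncoding := r.Header.Get("Content-Encoding")

	var reader io.ReadCloser
	switch contentEncoding {
	case "gzip":
		reader, err = gzip.NewReader(r.Body)
		if err != nil {
			return 0, err
		}
		defer reader.Close()
	default:
		reader = r.Body
	}

	n, err = io.ReadFull(reader, buf)
	if err == io.EOF || err == io.ErrUnexpectedEOF {
		// The body was shorter than buf; not an error in this sketch.
		err = nil
	}

	// Drain the rest of the body to avoid leaking the connection.
	// Leftover bytes used to be reported as an error; now only logged.
	data, _ := ioutil.ReadAll(reader)
	if len(data) != 0 {
		log.Printf("%s reader has remaining %d bytes", contentEncoding, len(data))
	}
	return n, err
}

func main() {
	buf := make([]byte, 1024)
	n, err := readAndDrain("https://example.com/", buf)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("read %d bytes\n", n)
}

Demoting the leftover-bytes condition from an error to a low-verbosity log line keeps short-buffer reads from failing with a spurious "buffer size is too small" error while still leaving a trace for debugging.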