mirror of https://github.com/seaweedfs/seaweedfs.git
synced 2024-01-19 02:48:24 +00:00
parent 025bd8d447
commit 92f906b6fc
go.mod
@@ -40,7 +40,7 @@ require (
 	github.com/json-iterator/go v1.1.10
 	github.com/karlseguin/ccache v2.0.3+incompatible
 	github.com/karlseguin/expect v1.0.1 // indirect
-	github.com/klauspost/compress v1.10.9
+	github.com/klauspost/compress v1.10.9 // indirect
 	github.com/klauspost/cpuid v1.2.1 // indirect
 	github.com/klauspost/crc32 v1.2.0
 	github.com/klauspost/reedsolomon v1.9.2
@@ -75,6 +75,7 @@ func TestCreateNeedleFromRequest(t *testing.T) {
 		Upload("http://localhost:8080/389,0f084d17353afda0", "t.txt", false, bytes.NewReader(gzippedData), true, "text/plain", nil, "")
 	}
 
+	/*
 	{
 		mc.needleHandling = func(n *needle.Needle, originalSize int, err error) {
 			assert.Equal(t, nil, err, "upload: %v", err)
@@ -98,6 +99,7 @@ func TestCreateNeedleFromRequest(t *testing.T) {
 		zstdData, _ := util.ZstdData([]byte(textContent))
 		Upload("http://localhost:8080/389,0f084d17353afda0", "t.txt", false, bytes.NewReader(zstdData), false, "application/zstd", nil, "")
 	}
+	*/
 }
 
 
@@ -159,8 +159,8 @@ func (vs *VolumeServer) GetOrHeadHandler(w http.ResponseWriter, r *http.Request)
 		if n.Data, err = util.DecompressData(n.Data); err != nil {
 			glog.V(0).Infoln("ungzip error:", err, r.URL.Path)
 		}
-	} else if strings.Contains(r.Header.Get("Accept-Encoding"), "zstd") && util.IsZstdContent(n.Data) {
-		w.Header().Set("Content-Encoding", "zstd")
+	// } else if strings.Contains(r.Header.Get("Accept-Encoding"), "zstd") && util.IsZstdContent(n.Data) {
+	// 	w.Header().Set("Content-Encoding", "zstd")
 	} else if strings.Contains(r.Header.Get("Accept-Encoding"), "gzip") && util.IsGzippedContent(n.Data) {
 		w.Header().Set("Content-Encoding", "gzip")
 	} else {
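
Not part of the commit: a minimal client-side sketch of the gzip negotiation that this hunk keeps. It assumes the same placeholder volume-server address and file id used in the tests above; note that setting Accept-Encoding by hand also tells Go's http transport not to gunzip the body transparently.

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Placeholder address and file id borrowed from the test hunk above.
	req, err := http.NewRequest("GET", "http://localhost:8080/389,0f084d17353afda0", nil)
	if err != nil {
		panic(err)
	}
	// With the zstd branch commented out, gzip is the only encoding the
	// handler will advertise; asking for it explicitly also disables the
	// transport's transparent gunzip, so the raw encoding stays visible.
	req.Header.Set("Accept-Encoding", "gzip")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println("Content-Encoding:", resp.Header.Get("Content-Encoding"))
	fmt.Println("body bytes:", len(body))
}
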
@@ -72,8 +72,9 @@ func (c *commandFsMetaCat) Do(args []string, commandEnv *CommandEnv, writer io.W
 
 		bytes, _ := proto.Marshal(respLookupEntry.Entry)
 		gzippedBytes, _ := util.GzipData(bytes)
-		zstdBytes, _ := util.ZstdData(bytes)
-		fmt.Fprintf(writer, "chunks %d meta size: %d gzip:%d zstd:%d\n", len(respLookupEntry.Entry.Chunks), len(bytes), len(gzippedBytes), len(zstdBytes))
+		// zstdBytes, _ := util.ZstdData(bytes)
+		// fmt.Fprintf(writer, "chunks %d meta size: %d gzip:%d zstd:%d\n", len(respLookupEntry.Entry.Chunks), len(bytes), len(gzippedBytes), len(zstdBytes))
+		fmt.Fprintf(writer, "chunks %d meta size: %d gzip:%d\n", len(respLookupEntry.Entry.Chunks), len(bytes), len(gzippedBytes))
 
 		return nil
 
@@ -23,7 +23,7 @@ type ParsedUpload struct {
 	MimeType         string
 	PairMap          map[string]string
 	IsGzipped        bool
-	IsZstd           bool
+	// IsZstd           bool
 	OriginalDataSize int
 	ModifiedTime     uint64
 	Ttl              *TTL
@@ -100,7 +100,7 @@ func ParseUpload(r *http.Request, sizeLimit int64) (pu *ParsedUpload, e error) {
 
 func parsePut(r *http.Request, sizeLimit int64, pu *ParsedUpload) (e error) {
 	pu.IsGzipped = r.Header.Get("Content-Encoding") == "gzip"
-	pu.IsZstd = r.Header.Get("Content-Encoding") == "zstd"
+	// pu.IsZstd = r.Header.Get("Content-Encoding") == "zstd"
 	pu.MimeType = r.Header.Get("Content-Type")
 	pu.FileName = ""
 	pu.Data, e = ioutil.ReadAll(io.LimitReader(r.Body, sizeLimit+1))
@@ -194,7 +194,7 @@ func parseMultipart(r *http.Request, sizeLimit int64, pu *ParsedUpload) (e error
 		}
 
 		pu.IsGzipped = part.Header.Get("Content-Encoding") == "gzip"
-		pu.IsZstd = part.Header.Get("Content-Encoding") == "zstd"
+		// pu.IsZstd = part.Header.Get("Content-Encoding") == "zstd"
 	}
 
 	return
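
Not from the repository: a hedged sketch of the kind of raw PUT whose Content-Encoding header drives pu.IsGzipped in parsePut above; after this commit a "zstd" value is simply ignored. The address and file id are the placeholder values used elsewhere in this diff.

package main

import (
	"bytes"
	"compress/gzip"
	"fmt"
	"net/http"
)

func main() {
	// Pre-compress the payload on the client, as the upload path expects.
	var buf bytes.Buffer
	zw := gzip.NewWriter(&buf)
	if _, err := zw.Write([]byte("some text content")); err != nil {
		panic(err)
	}
	zw.Close()

	req, err := http.NewRequest("PUT", "http://localhost:8080/389,0f084d17353afda0", &buf)
	if err != nil {
		panic(err)
	}
	// parsePut reads this header to set pu.IsGzipped; "zstd" is no longer recognized.
	req.Header.Set("Content-Encoding", "gzip")
	req.Header.Set("Content-Type", "text/plain")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}
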
@@ -9,7 +9,7 @@ import (
 	"strings"
 
 	"github.com/chrislusf/seaweedfs/weed/glog"
-	"github.com/klauspost/compress/zstd"
+	// "github.com/klauspost/compress/zstd"
 )
 
 var (
@@ -55,19 +55,16 @@ func GzipData(input []byte) ([]byte, error) {
 	return buf.Bytes(), nil
 }
 
-var zstdEncoder, _ = zstd.NewWriter(nil)
-
-func ZstdData(input []byte) ([]byte, error) {
-	return zstdEncoder.EncodeAll(input, nil), nil
-}
-
 func DecompressData(input []byte) ([]byte, error) {
 	if IsGzippedContent(input) {
 		return ungzipData(input)
 	}
+	/*
 	if IsZstdContent(input) {
 		return unzstdData(input)
 	}
+	*/
 	return input, UnsupportedCompression
 }
 
@@ -82,12 +79,6 @@ func ungzipData(input []byte) ([]byte, error) {
 	return output, err
 }
 
-var decoder, _ = zstd.NewReader(nil)
-
-func unzstdData(input []byte) ([]byte, error) {
-	return decoder.DecodeAll(input, nil)
-}
-
 func IsGzippedContent(data []byte) bool {
 	if len(data) < 2 {
 		return false
@@ -95,12 +86,26 @@ func IsGzippedContent(data []byte) bool {
 	return data[0] == 31 && data[1] == 139
 }
 
+/*
+var zstdEncoder, _ = zstd.NewWriter(nil)
+
+func ZstdData(input []byte) ([]byte, error) {
+	return zstdEncoder.EncodeAll(input, nil), nil
+}
+
+var decoder, _ = zstd.NewReader(nil)
+
+func unzstdData(input []byte) ([]byte, error) {
+	return decoder.DecodeAll(input, nil)
+}
+
 func IsZstdContent(data []byte) bool {
 	if len(data) < 4 {
 		return false
 	}
 	return data[3] == 0xFD && data[2] == 0x2F && data[1] == 0xB5 && data[0] == 0x28
 }
+*/
 
 /*
  * Default not to compressed since compression can be done on client side.
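
Not part of the diff: a self-contained sketch of the gzip round trip that remains the only supported compression after this change. The helper names mirror GzipData and ungzipData from the hunks above, but the bodies here are an assumption built on the standard compress/gzip package, not the repository's exact code.

package main

import (
	"bytes"
	"compress/gzip"
	"fmt"
	"io/ioutil"
)

// gzipData compresses input in memory, analogous to util.GzipData above.
func gzipData(input []byte) ([]byte, error) {
	var buf bytes.Buffer
	w := gzip.NewWriter(&buf)
	if _, err := w.Write(input); err != nil {
		return nil, err
	}
	if err := w.Close(); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}

// ungzipData reverses gzipData, analogous to the private ungzipData helper.
func ungzipData(input []byte) ([]byte, error) {
	r, err := gzip.NewReader(bytes.NewReader(input))
	if err != nil {
		return nil, err
	}
	defer r.Close()
	return ioutil.ReadAll(r)
}

func main() {
	compressed, _ := gzipData([]byte("hello seaweedfs"))
	// IsGzippedContent checks exactly these two magic bytes: 31 (0x1f) and 139 (0x8b).
	fmt.Printf("magic bytes: %d %d\n", compressed[0], compressed[1])
	restored, _ := ungzipData(compressed)
	fmt.Println(string(restored))
}
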