mirror of
https://github.com/seaweedfs/seaweedfs.git
synced 2024-01-19 02:48:24 +00:00
fix chunk.ModifiedTsNs (#4264)
Fix chunk.ModifiedTsNs: the field expects nanoseconds, but the modification time was set with time.Now().Unix() (seconds); use time.Now().UnixNano() instead. Co-authored-by: zemul <zhouzemiao@ihuman.com>
This commit is contained in:
parent
8241c9a829
commit
0bf56298d5
|
@ -1,47 +1,47 @@
|
|||
package images
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"image"
|
||||
"image/gif"
|
||||
"image/jpeg"
|
||||
"image/png"
|
||||
"io"
|
||||
|
||||
"github.com/disintegration/imaging"
|
||||
|
||||
"github.com/seaweedfs/seaweedfs/weed/glog"
|
||||
)
|
||||
|
||||
func Cropped(ext string, read io.ReadSeeker, x1, y1, x2, y2 int) (cropped io.ReadSeeker, err error) {
|
||||
srcImage, _, err := image.Decode(read)
|
||||
if err != nil {
|
||||
glog.Error(err)
|
||||
return read, err
|
||||
}
|
||||
|
||||
bounds := srcImage.Bounds()
|
||||
if x2 > bounds.Dx() || y2 > bounds.Dy() {
|
||||
read.Seek(0, 0)
|
||||
return read, nil
|
||||
}
|
||||
|
||||
rectangle := image.Rect(x1, y1, x2, y2)
|
||||
dstImage := imaging.Crop(srcImage, rectangle)
|
||||
var buf bytes.Buffer
|
||||
switch ext {
|
||||
case ".jpg", ".jpeg":
|
||||
if err = jpeg.Encode(&buf, dstImage, nil); err != nil {
|
||||
glog.Error(err)
|
||||
}
|
||||
case ".png":
|
||||
if err = png.Encode(&buf, dstImage); err != nil {
|
||||
glog.Error(err)
|
||||
}
|
||||
case ".gif":
|
||||
if err = gif.Encode(&buf, dstImage, nil); err != nil {
|
||||
glog.Error(err)
|
||||
}
|
||||
}
|
||||
return bytes.NewReader(buf.Bytes()), err
|
||||
}
|
||||
package images
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"image"
|
||||
"image/gif"
|
||||
"image/jpeg"
|
||||
"image/png"
|
||||
"io"
|
||||
|
||||
"github.com/disintegration/imaging"
|
||||
|
||||
"github.com/seaweedfs/seaweedfs/weed/glog"
|
||||
)
|
||||
|
||||
func Cropped(ext string, read io.ReadSeeker, x1, y1, x2, y2 int) (cropped io.ReadSeeker, err error) {
|
||||
srcImage, _, err := image.Decode(read)
|
||||
if err != nil {
|
||||
glog.Error(err)
|
||||
return read, err
|
||||
}
|
||||
|
||||
bounds := srcImage.Bounds()
|
||||
if x2 > bounds.Dx() || y2 > bounds.Dy() {
|
||||
read.Seek(0, 0)
|
||||
return read, nil
|
||||
}
|
||||
|
||||
rectangle := image.Rect(x1, y1, x2, y2)
|
||||
dstImage := imaging.Crop(srcImage, rectangle)
|
||||
var buf bytes.Buffer
|
||||
switch ext {
|
||||
case ".jpg", ".jpeg":
|
||||
if err = jpeg.Encode(&buf, dstImage, nil); err != nil {
|
||||
glog.Error(err)
|
||||
}
|
||||
case ".png":
|
||||
if err = png.Encode(&buf, dstImage); err != nil {
|
||||
glog.Error(err)
|
||||
}
|
||||
case ".gif":
|
||||
if err = gif.Encode(&buf, dstImage, nil); err != nil {
|
||||
glog.Error(err)
|
||||
}
|
||||
}
|
||||
return bytes.NewReader(buf.Bytes()), err
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
package s3_constants
|
||||
|
||||
//Amazon S3 predefined groups
|
||||
// Amazon S3 predefined groups
|
||||
var (
|
||||
GranteeGroupAllUsers = "http://acs.amazonaws.com/groups/global/AllUsers"
|
||||
GranteeGroupAuthenticatedUsers = "http://acs.amazonaws.com/groups/global/AuthenticatedUsers"
|
||||
|
|
|
@ -154,10 +154,11 @@ func (fs *FilerServer) CacheRemoteObjectToLocalCluster(ctx context.Context, req
|
|||
}
|
||||
|
||||
chunks = append(chunks, &filer_pb.FileChunk{
|
||||
|
||||
FileId: assignResult.Fid,
|
||||
Offset: localOffset,
|
||||
Size: uint64(size),
|
||||
ModifiedTsNs: time.Now().Unix(),
|
||||
ModifiedTsNs: time.Now().UnixNano(),
|
||||
ETag: etag,
|
||||
Fid: &filer_pb.FileId{
|
||||
VolumeId: uint32(fileId.VolumeId),
|
||||
|
|
|
@ -163,6 +163,7 @@ func (c *commandVolumeFsck) Do(args []string, commandEnv *CommandEnv, writer io.
|
|||
|
||||
if *c.findMissingChunksInFiler {
|
||||
// collect all filer file ids and paths
|
||||
|
||||
if err = c.collectFilerFileIdAndPaths(dataNodeVolumeIdToVInfo, *purgeAbsent, collectCutoffFromAtNs); err != nil {
|
||||
return fmt.Errorf("collectFilerFileIdAndPaths: %v", err)
|
||||
}
|
||||
|
|
|
@ -35,7 +35,7 @@ type Topology struct {
|
|||
|
||||
volumeSizeLimit uint64
|
||||
replicationAsMin bool
|
||||
isDisableVacuum bool
|
||||
isDisableVacuum bool
|
||||
|
||||
Sequence sequence.Sequencer
|
||||
|
||||
|
|
Loading…
Reference in a new issue