2020-01-31 08:11:08 +00:00
|
|
|
package s3api
|
|
|
|
|
|
|
|
import (
|
|
|
|
"fmt"
|
|
|
|
"net/http"
|
|
|
|
"net/url"
|
|
|
|
"strconv"
|
|
|
|
"strings"
|
|
|
|
"time"
|
|
|
|
|
|
|
|
"github.com/chrislusf/seaweedfs/weed/util"
|
|
|
|
)
|
|
|
|
|
|
|
|
func (s3a *S3ApiServer) CopyObjectHandler(w http.ResponseWriter, r *http.Request) {
|
|
|
|
|
2020-07-26 19:58:58 +00:00
|
|
|
dstBucket, dstObject := getBucketAndObject(r)
|
2020-01-31 08:11:08 +00:00
|
|
|
|
|
|
|
// Copy source path.
|
|
|
|
cpSrcPath, err := url.QueryUnescape(r.Header.Get("X-Amz-Copy-Source"))
|
|
|
|
if err != nil {
|
|
|
|
// Save unescaped string as is.
|
|
|
|
cpSrcPath = r.Header.Get("X-Amz-Copy-Source")
|
|
|
|
}
|
|
|
|
|
|
|
|
srcBucket, srcObject := pathToBucketAndObject(cpSrcPath)
|
|
|
|
// If source object is empty or bucket is empty, reply back invalid copy source.
|
|
|
|
if srcObject == "" || srcBucket == "" {
|
|
|
|
writeErrorResponse(w, ErrInvalidCopySource, r.URL)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
if srcBucket == dstBucket && srcObject == dstObject {
|
|
|
|
writeErrorResponse(w, ErrInvalidCopySource, r.URL)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
dstUrl := fmt.Sprintf("http://%s%s/%s%s?collection=%s",
|
|
|
|
s3a.option.Filer, s3a.option.BucketsPath, dstBucket, dstObject, dstBucket)
|
|
|
|
srcUrl := fmt.Sprintf("http://%s%s/%s%s",
|
|
|
|
s3a.option.Filer, s3a.option.BucketsPath, srcBucket, srcObject)
|
|
|
|
|
|
|
|
_, _, dataReader, err := util.DownloadFile(srcUrl)
|
|
|
|
if err != nil {
|
|
|
|
writeErrorResponse(w, ErrInvalidCopySource, r.URL)
|
|
|
|
return
|
|
|
|
}
|
2020-02-14 17:09:15 +00:00
|
|
|
defer dataReader.Close()
|
2020-01-31 08:11:08 +00:00
|
|
|
|
|
|
|
etag, errCode := s3a.putToFiler(r, dstUrl, dataReader)
|
|
|
|
|
|
|
|
if errCode != ErrNone {
|
|
|
|
writeErrorResponse(w, errCode, r.URL)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
setEtag(w, etag)
|
|
|
|
|
|
|
|
response := CopyObjectResult{
|
|
|
|
ETag: etag,
|
2020-07-25 07:52:31 +00:00
|
|
|
LastModified: time.Now().UTC(),
|
2020-01-31 08:11:08 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
writeSuccessResponseXML(w, encodeResponse(response))
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
// pathToBucketAndObject splits an S3 copy-source path of the form
// "/bucket/key" into the bucket name and the "/"-prefixed object key.
// When the path names only a bucket, the object defaults to "/".
func pathToBucketAndObject(path string) (bucket, object string) {
	path = strings.TrimPrefix(path, "/")
	if idx := strings.Index(path, "/"); idx >= 0 {
		// Everything from the first slash onward (inclusive) is the key.
		return path[:idx], path[idx:]
	}
	return path, "/"
}
|
|
|
|
|
|
|
|
// CopyPartResult is the XML body returned for a successful UploadPartCopy
// request. Field order is preserved because it determines the order of the
// marshaled XML elements.
type CopyPartResult struct {
	// LastModified is the modification time reported for the copied part.
	LastModified time.Time `xml:"LastModified"`
	// ETag is the entity tag of the stored part.
	ETag string `xml:"ETag"`
}
|
|
|
|
|
|
|
|
func (s3a *S3ApiServer) CopyObjectPartHandler(w http.ResponseWriter, r *http.Request) {
|
|
|
|
// https://docs.aws.amazon.com/AmazonS3/latest/dev/CopyingObjctsUsingRESTMPUapi.html
|
|
|
|
// https://docs.aws.amazon.com/AmazonS3/latest/API/API_UploadPartCopy.html
|
2020-07-26 19:58:58 +00:00
|
|
|
dstBucket, _ := getBucketAndObject(r)
|
2020-01-31 08:11:08 +00:00
|
|
|
|
|
|
|
// Copy source path.
|
|
|
|
cpSrcPath, err := url.QueryUnescape(r.Header.Get("X-Amz-Copy-Source"))
|
|
|
|
if err != nil {
|
|
|
|
// Save unescaped string as is.
|
|
|
|
cpSrcPath = r.Header.Get("X-Amz-Copy-Source")
|
|
|
|
}
|
|
|
|
|
|
|
|
srcBucket, srcObject := pathToBucketAndObject(cpSrcPath)
|
|
|
|
// If source object is empty or bucket is empty, reply back invalid copy source.
|
|
|
|
if srcObject == "" || srcBucket == "" {
|
|
|
|
writeErrorResponse(w, ErrInvalidCopySource, r.URL)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
uploadID := r.URL.Query().Get("uploadId")
|
|
|
|
partIDString := r.URL.Query().Get("partNumber")
|
|
|
|
|
|
|
|
partID, err := strconv.Atoi(partIDString)
|
|
|
|
if err != nil {
|
|
|
|
writeErrorResponse(w, ErrInvalidPart, r.URL)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// check partID with maximum part ID for multipart objects
|
2020-02-25 20:58:45 +00:00
|
|
|
if partID > globalMaxPartID {
|
2020-01-31 08:11:08 +00:00
|
|
|
writeErrorResponse(w, ErrInvalidMaxParts, r.URL)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
rangeHeader := r.Header.Get("x-amz-copy-source-range")
|
|
|
|
|
|
|
|
dstUrl := fmt.Sprintf("http://%s%s/%s/%04d.part?collection=%s",
|
|
|
|
s3a.option.Filer, s3a.genUploadsFolder(dstBucket), uploadID, partID-1, dstBucket)
|
|
|
|
srcUrl := fmt.Sprintf("http://%s%s/%s%s",
|
|
|
|
s3a.option.Filer, s3a.option.BucketsPath, srcBucket, srcObject)
|
|
|
|
|
|
|
|
dataReader, err := util.ReadUrlAsReaderCloser(srcUrl, rangeHeader)
|
|
|
|
if err != nil {
|
|
|
|
writeErrorResponse(w, ErrInvalidCopySource, r.URL)
|
|
|
|
return
|
|
|
|
}
|
2020-02-14 17:09:15 +00:00
|
|
|
defer dataReader.Close()
|
2020-01-31 08:11:08 +00:00
|
|
|
|
|
|
|
etag, errCode := s3a.putToFiler(r, dstUrl, dataReader)
|
|
|
|
|
|
|
|
if errCode != ErrNone {
|
|
|
|
writeErrorResponse(w, errCode, r.URL)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
setEtag(w, etag)
|
|
|
|
|
|
|
|
response := CopyPartResult{
|
|
|
|
ETag: etag,
|
2020-07-25 07:52:31 +00:00
|
|
|
LastModified: time.Now().UTC(),
|
2020-01-31 08:11:08 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
writeSuccessResponseXML(w, encodeResponse(response))
|
|
|
|
|
|
|
|
}
|