package s3api

import (
	"crypto/sha1"
	"encoding/xml"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"strconv"
	"strings"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/service/s3"

	"github.com/chrislusf/seaweedfs/weed/glog"
	xhttp "github.com/chrislusf/seaweedfs/weed/s3api/http"
	"github.com/chrislusf/seaweedfs/weed/s3api/s3err"
	weed_server "github.com/chrislusf/seaweedfs/weed/server"
)

const (
	maxObjectListSizeLimit = 10000 // Limit number of objects in a listObjectsResponse.
	maxUploadsList         = 10000 // Limit number of uploads in a listUploadsResponse.
	maxPartsList           = 10000 // Limit number of parts in a listPartsResponse.
	globalMaxPartID        = 100000
)
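
// The handlers below implement the S3 multipart upload API: CreateMultipartUpload,
// UploadPart, CompleteMultipartUpload, AbortMultipartUpload, ListMultipartUploads,
// and ListParts. Upload state and part data are kept under the per-bucket ".uploads"
// folder on the filer (see genUploadsFolder below).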

// NewMultipartUploadHandler - New multipart upload.
func (s3a *S3ApiServer) NewMultipartUploadHandler(w http.ResponseWriter, r *http.Request) {
	bucket, object := xhttp.GetBucketAndObject(r)

	createMultipartUploadInput := &s3.CreateMultipartUploadInput{
		Bucket:   aws.String(bucket),
		Key:      objectKey(aws.String(object)),
		Metadata: make(map[string]*string),
	}

	// Copy the Amz metadata headers from the request into the new upload.
	metadata := weed_server.SaveAmzMetaData(r, nil, false)
	for k, v := range metadata {
		createMultipartUploadInput.Metadata[k] = aws.String(string(v))
	}

	contentType := r.Header.Get("Content-Type")
	if contentType != "" {
		createMultipartUploadInput.ContentType = &contentType
	}
	response, errCode := s3a.createMultipartUpload(createMultipartUploadInput)

	glog.V(2).Info("NewMultipartUploadHandler", string(s3err.EncodeXMLResponse(response)), errCode)

	if errCode != s3err.ErrNone {
		s3err.WriteErrorResponse(w, r, errCode)
		return
	}

	writeSuccessResponseXML(w, r, response)
}
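
// The request body decoded below is the standard S3 completion document, e.g.
// (illustrative only):
//
//	<CompleteMultipartUpload>
//	  <Part><PartNumber>1</PartNumber><ETag>"etag-of-part-1"</ETag></Part>
//	  <Part><PartNumber>2</PartNumber><ETag>"etag-of-part-2"</ETag></Part>
//	</CompleteMultipartUpload>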

// CompleteMultipartUploadHandler - Completes multipart upload.
func (s3a *S3ApiServer) CompleteMultipartUploadHandler(w http.ResponseWriter, r *http.Request) {
	// https://docs.aws.amazon.com/AmazonS3/latest/API/API_CompleteMultipartUpload.html

	bucket, object := xhttp.GetBucketAndObject(r)

	parts := &CompleteMultipartUpload{}
	if err := xmlDecoder(r.Body, parts, r.ContentLength); err != nil {
		s3err.WriteErrorResponse(w, r, s3err.ErrMalformedXML)
		return
	}

	// Get upload id.
	uploadID, _, _, _ := getObjectResources(r.URL.Query())
	err := s3a.checkUploadId(object, uploadID)
	if err != nil {
		s3err.WriteErrorResponse(w, r, s3err.ErrNoSuchUpload)
		return
	}

	response, errCode := s3a.completeMultipartUpload(&s3.CompleteMultipartUploadInput{
		Bucket:   aws.String(bucket),
		Key:      objectKey(aws.String(object)),
		UploadId: aws.String(uploadID),
	}, parts)

	glog.V(2).Info("CompleteMultipartUploadHandler", string(s3err.EncodeXMLResponse(response)), errCode)

	if errCode != s3err.ErrNone {
		s3err.WriteErrorResponse(w, r, errCode)
		return
	}

	writeSuccessResponseXML(w, r, response)
}
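
// Abort requests take the standard S3 form DELETE /<bucket>/<object>?uploadId=<id>
// (routing is configured elsewhere); the uploadId is validated against the object key
// before the upload is discarded.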

// AbortMultipartUploadHandler - Aborts multipart upload.
func (s3a *S3ApiServer) AbortMultipartUploadHandler(w http.ResponseWriter, r *http.Request) {
	bucket, object := xhttp.GetBucketAndObject(r)

	// Get upload id.
	uploadID, _, _, _ := getObjectResources(r.URL.Query())
	err := s3a.checkUploadId(object, uploadID)
	if err != nil {
		s3err.WriteErrorResponse(w, r, s3err.ErrNoSuchUpload)
		return
	}

	response, errCode := s3a.abortMultipartUpload(&s3.AbortMultipartUploadInput{
		Bucket:   aws.String(bucket),
		Key:      objectKey(aws.String(object)),
		UploadId: aws.String(uploadID),
	})

	if errCode != s3err.ErrNone {
		s3err.WriteErrorResponse(w, r, errCode)
		return
	}

	glog.V(2).Info("AbortMultipartUploadHandler", string(s3err.EncodeXMLResponse(response)))

	writeSuccessResponseXML(w, r, response)
}
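
// A listing request takes the standard S3 form GET /<bucket>?uploads with optional
// prefix, delimiter, key-marker, upload-id-marker, max-uploads and encoding-type
// query parameters (see getBucketMultipartResources below).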

// ListMultipartUploadsHandler - Lists multipart uploads.
func (s3a *S3ApiServer) ListMultipartUploadsHandler(w http.ResponseWriter, r *http.Request) {
	bucket, _ := xhttp.GetBucketAndObject(r)

	prefix, keyMarker, uploadIDMarker, delimiter, maxUploads, encodingType := getBucketMultipartResources(r.URL.Query())
	if maxUploads < 0 {
		s3err.WriteErrorResponse(w, r, s3err.ErrInvalidMaxUploads)
		return
	}
	if keyMarker != "" {
		// A key marker that does not share the prefix is not implemented.
		if !strings.HasPrefix(keyMarker, prefix) {
			s3err.WriteErrorResponse(w, r, s3err.ErrNotImplemented)
			return
		}
	}

	response, errCode := s3a.listMultipartUploads(&s3.ListMultipartUploadsInput{
		Bucket:         aws.String(bucket),
		Delimiter:      aws.String(delimiter),
		EncodingType:   aws.String(encodingType),
		KeyMarker:      aws.String(keyMarker),
		MaxUploads:     aws.Int64(int64(maxUploads)),
		Prefix:         aws.String(prefix),
		UploadIdMarker: aws.String(uploadIDMarker),
	})

	glog.V(2).Infof("ListMultipartUploadsHandler %s errCode=%d", string(s3err.EncodeXMLResponse(response)), errCode)

	if errCode != s3err.ErrNone {
		s3err.WriteErrorResponse(w, r, errCode)
		return
	}

	// TODO handle encodingType

	writeSuccessResponseXML(w, r, response)
}
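
// A parts listing takes the standard S3 form GET /<bucket>/<object>?uploadId=<id>
// with optional part-number-marker and max-parts query parameters
// (see getObjectResources below).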

// ListObjectPartsHandler - Lists object parts in a multipart upload.
func (s3a *S3ApiServer) ListObjectPartsHandler(w http.ResponseWriter, r *http.Request) {
	bucket, object := xhttp.GetBucketAndObject(r)

	uploadID, partNumberMarker, maxParts, _ := getObjectResources(r.URL.Query())
	if partNumberMarker < 0 {
		s3err.WriteErrorResponse(w, r, s3err.ErrInvalidPartNumberMarker)
		return
	}
	if maxParts < 0 {
		s3err.WriteErrorResponse(w, r, s3err.ErrInvalidMaxParts)
		return
	}

	err := s3a.checkUploadId(object, uploadID)
	if err != nil {
		s3err.WriteErrorResponse(w, r, s3err.ErrNoSuchUpload)
		return
	}

	response, errCode := s3a.listObjectParts(&s3.ListPartsInput{
		Bucket:           aws.String(bucket),
		Key:              objectKey(aws.String(object)),
		MaxParts:         aws.Int64(int64(maxParts)),
		PartNumberMarker: aws.Int64(int64(partNumberMarker)),
		UploadId:         aws.String(uploadID),
	})

	if errCode != s3err.ErrNone {
		s3err.WriteErrorResponse(w, r, errCode)
		return
	}

	glog.V(2).Infof("ListObjectPartsHandler %s count=%d", string(s3err.EncodeXMLResponse(response)), len(response.Part))

	writeSuccessResponseXML(w, r, response)
}
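
// A part upload arrives as PUT /<bucket>/<object>?partNumber=<n>&uploadId=<id>.
// The part body is forwarded to the filer and stored as
// <BucketsPath>/<bucket>/.uploads/<uploadId>/<nnnn>.part, where <nnnn> is the
// zero-padded part number (see uploadUrl below).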

// PutObjectPartHandler - Put an object part in a multipart upload.
func (s3a *S3ApiServer) PutObjectPartHandler(w http.ResponseWriter, r *http.Request) {
	bucket, object := xhttp.GetBucketAndObject(r)

	uploadID := r.URL.Query().Get("uploadId")
	err := s3a.checkUploadId(object, uploadID)
	if err != nil {
		s3err.WriteErrorResponse(w, r, s3err.ErrNoSuchUpload)
		return
	}

	partIDString := r.URL.Query().Get("partNumber")
	partID, err := strconv.Atoi(partIDString)
	if err != nil {
		s3err.WriteErrorResponse(w, r, s3err.ErrInvalidPart)
		return
	}
	if partID > globalMaxPartID {
		s3err.WriteErrorResponse(w, r, s3err.ErrInvalidMaxParts)
		return
	}

	dataReader := r.Body
	if s3a.iam.isEnabled() {
		rAuthType := getRequestAuthType(r)
		var s3ErrCode s3err.ErrorCode
		switch rAuthType {
		case authTypeStreamingSigned:
			// Wrap the body so the streaming (chunked) signature is verified while reading.
			dataReader, s3ErrCode = s3a.iam.newSignV4ChunkedReader(r)
		case authTypeSignedV2, authTypePresignedV2:
			_, s3ErrCode = s3a.iam.isReqAuthenticatedV2(r)
		case authTypePresigned, authTypeSigned:
			_, s3ErrCode = s3a.iam.reqSignatureV4Verify(r)
		}
		if s3ErrCode != s3err.ErrNone {
			s3err.WriteErrorResponse(w, r, s3ErrCode)
			return
		}
	}
	defer dataReader.Close()

	glog.V(2).Infof("PutObjectPartHandler %s %s %04d", bucket, uploadID, partID)

	uploadUrl := fmt.Sprintf("http://%s%s/%s/%04d.part?collection=%s",
		s3a.option.Filer.ToHttpAddress(), s3a.genUploadsFolder(bucket), uploadID, partID, bucket)

	if partID == 1 && r.Header.Get("Content-Type") == "" {
		// Only the first part is sniffed for a content type when the client did not provide one.
		dataReader = mimeDetect(r, dataReader)
	}

	etag, errCode := s3a.putToFiler(r, uploadUrl, dataReader)
	if errCode != s3err.ErrNone {
		s3err.WriteErrorResponse(w, r, errCode)
		return
	}

	setEtag(w, etag)

	writeSuccessResponseEmpty(w, r)
}

func (s3a *S3ApiServer) genUploadsFolder(bucket string) string {
	return fmt.Sprintf("%s/%s/.uploads", s3a.option.BucketsPath, bucket)
}
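
// For example (hypothetical object key), generateUploadID("/photos/cat.jpg") returns
// the hex-encoded SHA-1 of "photos/cat.jpg"; the same key always yields the same id.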

// generateUploadID generates the uploadID hash string from the object key.
func (s3a *S3ApiServer) generateUploadID(object string) string {
	// Strip the leading "/" so "/key" and "key" map to the same id.
	if strings.HasPrefix(object, "/") {
		object = object[1:]
	}
	h := sha1.New()
	h.Write([]byte(object))
	return fmt.Sprintf("%x", h.Sum(nil))
}

// checkUploadId verifies that the uploadID matches the one derived from the object key
// (see generateUploadID) when processing a multipart upload.
func (s3a *S3ApiServer) checkUploadId(object string, id string) error {
	hash := s3a.generateUploadID(object)
	if hash != id {
		glog.Errorf("object %s and uploadID %s do not match", object, id)
		return fmt.Errorf("object %s and uploadID %s do not match", object, id)
	}
	return nil
}
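
// For example (illustrative query), "?uploads&prefix=logs/&max-uploads=50" parses to
// prefix "logs/" and maxUploads 50; when max-uploads is absent, maxUploads defaults
// to maxUploadsList.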

// Parse bucket url queries for ?uploads
func getBucketMultipartResources(values url.Values) (prefix, keyMarker, uploadIDMarker, delimiter string, maxUploads int, encodingType string) {
	prefix = values.Get("prefix")
	keyMarker = values.Get("key-marker")
	uploadIDMarker = values.Get("upload-id-marker")
	delimiter = values.Get("delimiter")
	if values.Get("max-uploads") != "" {
		maxUploads, _ = strconv.Atoi(values.Get("max-uploads"))
	} else {
		maxUploads = maxUploadsList
	}
	encodingType = values.Get("encoding-type")
	return
}

// Parse object url queries
func getObjectResources(values url.Values) (uploadID string, partNumberMarker, maxParts int, encodingType string) {
	uploadID = values.Get("uploadId")
	partNumberMarker, _ = strconv.Atoi(values.Get("part-number-marker"))
	if values.Get("max-parts") != "" {
		maxParts, _ = strconv.Atoi(values.Get("max-parts"))
	} else {
		maxParts = maxPartsList
	}
	encodingType = values.Get("encoding-type")
	return
}

// xmlDecoder decodes the request body into v, reading at most size bytes when a
// positive size (usually the request Content-Length) is given.
func xmlDecoder(body io.Reader, v interface{}, size int64) error {
	var lbody io.Reader
	if size > 0 {
		lbody = io.LimitReader(body, size)
	} else {
		lbody = body
	}
	d := xml.NewDecoder(lbody)
	// Accept any declared charset and pass the bytes through unchanged.
	d.CharsetReader = func(label string, input io.Reader) (io.Reader, error) {
		return input, nil
	}
	return d.Decode(v)
}

// CompleteMultipartUpload is the XML request body of an S3 CompleteMultipartUpload call.
type CompleteMultipartUpload struct {
	Parts []CompletedPart `xml:"Part"`
}

// CompletedPart identifies one uploaded part by its part number and ETag.
type CompletedPart struct {
	ETag       string
	PartNumber int
}