2018-09-04 02:38:10 +08:00
|
|
|
package s3api
|
|
|
|
|
|
|
|
import (
|
2022-04-12 11:04:38 +08:00
|
|
|
"crypto/sha1"
|
2022-05-02 14:16:29 +08:00
|
|
|
"encoding/xml"
|
2018-09-04 15:42:44 +08:00
|
|
|
"fmt"
|
2020-11-11 18:01:24 +08:00
|
|
|
"github.com/chrislusf/seaweedfs/weed/glog"
|
2021-12-07 15:15:48 +08:00
|
|
|
xhttp "github.com/chrislusf/seaweedfs/weed/s3api/http"
|
2020-09-20 05:09:58 +08:00
|
|
|
"github.com/chrislusf/seaweedfs/weed/s3api/s3err"
|
2021-07-02 10:12:11 +08:00
|
|
|
weed_server "github.com/chrislusf/seaweedfs/weed/server"
|
2022-03-23 16:05:14 +08:00
|
|
|
"io"
|
2018-09-10 07:26:11 +08:00
|
|
|
"net/http"
|
|
|
|
"net/url"
|
|
|
|
"strconv"
|
2018-09-04 15:42:44 +08:00
|
|
|
"strings"
|
2020-02-10 09:42:17 +08:00
|
|
|
|
|
|
|
"github.com/aws/aws-sdk-go/aws"
|
|
|
|
"github.com/aws/aws-sdk-go/service/s3"
|
2018-09-04 15:42:44 +08:00
|
|
|
)
|
|
|
|
|
|
|
|
const (
	maxObjectListSizeLimit = 10000 // Limit number of objects in a listObjectsResponse.
	maxUploadsList         = 10000 // Limit number of uploads in a listUploadsResponse.
	maxPartsList           = 10000 // Limit number of parts in a listPartsResponse.
	globalMaxPartID        = 100000 // Upper bound for partNumber accepted by PutObjectPartHandler.
)
|
|
|
|
|
|
|
|
// NewMultipartUploadHandler - New multipart upload.
|
2018-09-04 15:42:44 +08:00
|
|
|
func (s3a *S3ApiServer) NewMultipartUploadHandler(w http.ResponseWriter, r *http.Request) {
|
2021-12-07 15:15:48 +08:00
|
|
|
bucket, object := xhttp.GetBucketAndObject(r)
|
2018-09-04 15:42:44 +08:00
|
|
|
|
2021-07-02 10:12:11 +08:00
|
|
|
createMultipartUploadInput := &s3.CreateMultipartUploadInput{
|
|
|
|
Bucket: aws.String(bucket),
|
|
|
|
Key: objectKey(aws.String(object)),
|
|
|
|
Metadata: make(map[string]*string),
|
|
|
|
}
|
|
|
|
|
|
|
|
metadata := weed_server.SaveAmzMetaData(r, nil, false)
|
|
|
|
for k, v := range metadata {
|
|
|
|
createMultipartUploadInput.Metadata[k] = aws.String(string(v))
|
|
|
|
}
|
|
|
|
|
2021-10-12 20:14:54 +08:00
|
|
|
contentType := r.Header.Get("Content-Type")
|
|
|
|
if contentType != "" {
|
|
|
|
createMultipartUploadInput.ContentType = &contentType
|
|
|
|
}
|
2021-07-02 10:12:11 +08:00
|
|
|
response, errCode := s3a.createMultipartUpload(createMultipartUploadInput)
|
2018-09-04 15:42:44 +08:00
|
|
|
|
2021-07-16 03:17:48 +08:00
|
|
|
glog.V(2).Info("NewMultipartUploadHandler", string(s3err.EncodeXMLResponse(response)), errCode)
|
2020-11-11 18:01:24 +08:00
|
|
|
|
2020-09-20 05:09:58 +08:00
|
|
|
if errCode != s3err.ErrNone {
|
2021-11-01 09:05:34 +08:00
|
|
|
s3err.WriteErrorResponse(w, r, errCode)
|
2018-09-04 15:42:44 +08:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2021-11-01 09:02:08 +08:00
|
|
|
writeSuccessResponseXML(w, r, response)
|
2018-09-04 02:38:10 +08:00
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
// CompleteMultipartUploadHandler - Completes multipart upload.
|
2018-09-04 15:42:44 +08:00
|
|
|
func (s3a *S3ApiServer) CompleteMultipartUploadHandler(w http.ResponseWriter, r *http.Request) {
|
2022-03-23 16:05:14 +08:00
|
|
|
// https://docs.aws.amazon.com/AmazonS3/latest/API/API_CompleteMultipartUpload.html
|
|
|
|
|
2021-12-07 15:15:48 +08:00
|
|
|
bucket, object := xhttp.GetBucketAndObject(r)
|
2018-09-04 15:42:44 +08:00
|
|
|
|
2022-03-23 16:05:14 +08:00
|
|
|
parts := &CompleteMultipartUpload{}
|
|
|
|
if err := xmlDecoder(r.Body, parts, r.ContentLength); err != nil {
|
|
|
|
s3err.WriteErrorResponse(w, r, s3err.ErrMalformedXML)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2018-09-04 15:42:44 +08:00
|
|
|
// Get upload id.
|
|
|
|
uploadID, _, _, _ := getObjectResources(r.URL.Query())
|
2022-04-12 13:29:50 +08:00
|
|
|
err := s3a.checkUploadId(object, uploadID)
|
2022-04-11 18:59:36 +08:00
|
|
|
if err != nil {
|
|
|
|
s3err.WriteErrorResponse(w, r, s3err.ErrNoSuchUpload)
|
|
|
|
return
|
|
|
|
}
|
2018-09-04 15:42:44 +08:00
|
|
|
|
2020-02-26 14:23:59 +08:00
|
|
|
response, errCode := s3a.completeMultipartUpload(&s3.CompleteMultipartUploadInput{
|
2018-09-12 15:46:12 +08:00
|
|
|
Bucket: aws.String(bucket),
|
2019-07-09 03:37:20 +08:00
|
|
|
Key: objectKey(aws.String(object)),
|
2018-09-12 15:46:12 +08:00
|
|
|
UploadId: aws.String(uploadID),
|
2022-03-23 16:05:14 +08:00
|
|
|
}, parts)
|
2018-09-04 15:42:44 +08:00
|
|
|
|
2021-07-16 03:17:48 +08:00
|
|
|
glog.V(2).Info("CompleteMultipartUploadHandler", string(s3err.EncodeXMLResponse(response)), errCode)
|
2018-09-12 15:46:12 +08:00
|
|
|
|
2020-09-20 05:09:58 +08:00
|
|
|
if errCode != s3err.ErrNone {
|
2021-11-01 09:05:34 +08:00
|
|
|
s3err.WriteErrorResponse(w, r, errCode)
|
2018-09-04 15:42:44 +08:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2021-11-01 09:02:08 +08:00
|
|
|
writeSuccessResponseXML(w, r, response)
|
2018-09-04 02:38:10 +08:00
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
// AbortMultipartUploadHandler - Aborts multipart upload.
|
2018-09-04 15:42:44 +08:00
|
|
|
func (s3a *S3ApiServer) AbortMultipartUploadHandler(w http.ResponseWriter, r *http.Request) {
|
2021-12-07 15:15:48 +08:00
|
|
|
bucket, object := xhttp.GetBucketAndObject(r)
|
2018-09-04 15:42:44 +08:00
|
|
|
|
|
|
|
// Get upload id.
|
|
|
|
uploadID, _, _, _ := getObjectResources(r.URL.Query())
|
2022-04-12 13:29:50 +08:00
|
|
|
err := s3a.checkUploadId(object, uploadID)
|
2022-04-11 18:59:36 +08:00
|
|
|
if err != nil {
|
|
|
|
s3err.WriteErrorResponse(w, r, s3err.ErrNoSuchUpload)
|
|
|
|
return
|
|
|
|
}
|
2018-09-04 15:42:44 +08:00
|
|
|
|
2020-02-26 14:23:59 +08:00
|
|
|
response, errCode := s3a.abortMultipartUpload(&s3.AbortMultipartUploadInput{
|
2018-09-04 15:42:44 +08:00
|
|
|
Bucket: aws.String(bucket),
|
2019-07-09 03:37:20 +08:00
|
|
|
Key: objectKey(aws.String(object)),
|
2018-09-04 15:42:44 +08:00
|
|
|
UploadId: aws.String(uploadID),
|
|
|
|
})
|
|
|
|
|
2020-09-20 05:09:58 +08:00
|
|
|
if errCode != s3err.ErrNone {
|
2021-11-01 09:05:34 +08:00
|
|
|
s3err.WriteErrorResponse(w, r, errCode)
|
2018-09-04 15:42:44 +08:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2021-07-16 03:17:48 +08:00
|
|
|
glog.V(2).Info("AbortMultipartUploadHandler", string(s3err.EncodeXMLResponse(response)))
|
2018-09-12 15:46:12 +08:00
|
|
|
|
2021-11-01 09:02:08 +08:00
|
|
|
writeSuccessResponseXML(w, r, response)
|
2018-09-04 02:38:10 +08:00
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
// ListMultipartUploadsHandler - Lists multipart uploads.
|
2018-09-04 15:42:44 +08:00
|
|
|
func (s3a *S3ApiServer) ListMultipartUploadsHandler(w http.ResponseWriter, r *http.Request) {
|
2021-12-07 15:15:48 +08:00
|
|
|
bucket, _ := xhttp.GetBucketAndObject(r)
|
2018-09-04 02:38:10 +08:00
|
|
|
|
2018-09-04 15:42:44 +08:00
|
|
|
prefix, keyMarker, uploadIDMarker, delimiter, maxUploads, encodingType := getBucketMultipartResources(r.URL.Query())
|
|
|
|
if maxUploads < 0 {
|
2021-11-01 09:05:34 +08:00
|
|
|
s3err.WriteErrorResponse(w, r, s3err.ErrInvalidMaxUploads)
|
2018-09-04 15:42:44 +08:00
|
|
|
return
|
|
|
|
}
|
|
|
|
if keyMarker != "" {
|
|
|
|
// Marker not common with prefix is not implemented.
|
|
|
|
if !strings.HasPrefix(keyMarker, prefix) {
|
2021-11-01 09:05:34 +08:00
|
|
|
s3err.WriteErrorResponse(w, r, s3err.ErrNotImplemented)
|
2018-09-04 15:42:44 +08:00
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-02-26 14:23:59 +08:00
|
|
|
response, errCode := s3a.listMultipartUploads(&s3.ListMultipartUploadsInput{
|
2018-09-04 15:42:44 +08:00
|
|
|
Bucket: aws.String(bucket),
|
|
|
|
Delimiter: aws.String(delimiter),
|
|
|
|
EncodingType: aws.String(encodingType),
|
|
|
|
KeyMarker: aws.String(keyMarker),
|
|
|
|
MaxUploads: aws.Int64(int64(maxUploads)),
|
|
|
|
Prefix: aws.String(prefix),
|
|
|
|
UploadIdMarker: aws.String(uploadIDMarker),
|
|
|
|
})
|
|
|
|
|
2021-10-28 14:46:07 +08:00
|
|
|
glog.V(2).Infof("ListMultipartUploadsHandler %s errCode=%d", string(s3err.EncodeXMLResponse(response)), errCode)
|
2020-11-11 18:01:24 +08:00
|
|
|
|
2020-09-20 05:09:58 +08:00
|
|
|
if errCode != s3err.ErrNone {
|
2021-11-01 09:05:34 +08:00
|
|
|
s3err.WriteErrorResponse(w, r, errCode)
|
2018-09-04 15:42:44 +08:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2018-09-10 07:25:43 +08:00
|
|
|
// TODO handle encodingType
|
|
|
|
|
2021-11-01 09:02:08 +08:00
|
|
|
writeSuccessResponseXML(w, r, response)
|
2018-09-04 02:38:10 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
// ListObjectPartsHandler - Lists object parts in a multipart upload.
|
2018-09-04 15:42:44 +08:00
|
|
|
func (s3a *S3ApiServer) ListObjectPartsHandler(w http.ResponseWriter, r *http.Request) {
|
2021-12-07 15:15:48 +08:00
|
|
|
bucket, object := xhttp.GetBucketAndObject(r)
|
2018-09-04 15:42:44 +08:00
|
|
|
|
|
|
|
uploadID, partNumberMarker, maxParts, _ := getObjectResources(r.URL.Query())
|
|
|
|
if partNumberMarker < 0 {
|
2021-11-01 09:05:34 +08:00
|
|
|
s3err.WriteErrorResponse(w, r, s3err.ErrInvalidPartNumberMarker)
|
2018-09-04 15:42:44 +08:00
|
|
|
return
|
|
|
|
}
|
|
|
|
if maxParts < 0 {
|
2021-11-01 09:05:34 +08:00
|
|
|
s3err.WriteErrorResponse(w, r, s3err.ErrInvalidMaxParts)
|
2018-09-04 15:42:44 +08:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-04-12 13:29:50 +08:00
|
|
|
err := s3a.checkUploadId(object, uploadID)
|
2022-04-11 18:59:36 +08:00
|
|
|
if err != nil {
|
|
|
|
s3err.WriteErrorResponse(w, r, s3err.ErrNoSuchUpload)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2020-02-26 14:23:59 +08:00
|
|
|
response, errCode := s3a.listObjectParts(&s3.ListPartsInput{
|
2018-09-04 15:42:44 +08:00
|
|
|
Bucket: aws.String(bucket),
|
2019-07-09 03:37:20 +08:00
|
|
|
Key: objectKey(aws.String(object)),
|
2018-09-04 15:42:44 +08:00
|
|
|
MaxParts: aws.Int64(int64(maxParts)),
|
|
|
|
PartNumberMarker: aws.Int64(int64(partNumberMarker)),
|
|
|
|
UploadId: aws.String(uploadID),
|
|
|
|
})
|
|
|
|
|
2020-09-20 05:09:58 +08:00
|
|
|
if errCode != s3err.ErrNone {
|
2021-11-01 09:05:34 +08:00
|
|
|
s3err.WriteErrorResponse(w, r, errCode)
|
2018-09-04 15:42:44 +08:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-02-08 03:37:59 +08:00
|
|
|
glog.V(2).Infof("ListObjectPartsHandler %s count=%d", string(s3err.EncodeXMLResponse(response)), len(response.Part))
|
|
|
|
|
2021-11-01 09:02:08 +08:00
|
|
|
writeSuccessResponseXML(w, r, response)
|
2018-09-04 02:38:10 +08:00
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
// PutObjectPartHandler - Put an object part in a multipart upload.
// Validates the upload id and part number, optionally unwraps/verifies the
// request signature, then streams the part body to the filer under the
// bucket's uploads folder. On success only the ETag header is returned.
func (s3a *S3ApiServer) PutObjectPartHandler(w http.ResponseWriter, r *http.Request) {
	bucket, object := xhttp.GetBucketAndObject(r)

	// The upload id must match the one derived from the object name.
	uploadID := r.URL.Query().Get("uploadId")
	err := s3a.checkUploadId(object, uploadID)
	if err != nil {
		s3err.WriteErrorResponse(w, r, s3err.ErrNoSuchUpload)
		return
	}

	// partNumber must be a valid integer no larger than globalMaxPartID.
	partIDString := r.URL.Query().Get("partNumber")
	partID, err := strconv.Atoi(partIDString)
	if err != nil {
		s3err.WriteErrorResponse(w, r, s3err.ErrInvalidPart)
		return
	}
	if partID > globalMaxPartID {
		s3err.WriteErrorResponse(w, r, s3err.ErrInvalidMaxParts)
		return
	}

	dataReader := r.Body
	if s3a.iam.isEnabled() {
		rAuthType := getRequestAuthType(r)
		var s3ErrCode s3err.ErrorCode
		switch rAuthType {
		case authTypeStreamingSigned:
			// Streaming-signed uploads wrap the body in a chunk-signature-aware reader.
			dataReader, s3ErrCode = s3a.iam.newSignV4ChunkedReader(r)
		case authTypeSignedV2, authTypePresignedV2:
			_, s3ErrCode = s3a.iam.isReqAuthenticatedV2(r)
		case authTypePresigned, authTypeSigned:
			_, s3ErrCode = s3a.iam.reqSignatureV4Verify(r)
		}
		if s3ErrCode != s3err.ErrNone {
			s3err.WriteErrorResponse(w, r, s3ErrCode)
			return
		}
	}
	// Close whichever reader we ended up with (raw body or chunked wrapper).
	defer dataReader.Close()

	glog.V(2).Infof("PutObjectPartHandler %s %s %04d", bucket, uploadID, partID)

	// Parts are stored as <uploadsFolder>/<uploadID>/<partID>.part in the
	// collection named after the bucket.
	uploadUrl := fmt.Sprintf("http://%s%s/%s/%04d.part?collection=%s",
		s3a.option.Filer.ToHttpAddress(), s3a.genUploadsFolder(bucket), uploadID, partID, bucket)

	// Sniff the content type from the first part only, when the client did not set one.
	if partID == 1 && r.Header.Get("Content-Type") == "" {
		dataReader = mimeDetect(r, dataReader)
	}

	etag, errCode := s3a.putToFiler(r, uploadUrl, dataReader)
	if errCode != s3err.ErrNone {
		s3err.WriteErrorResponse(w, r, errCode)
		return
	}

	setEtag(w, etag)

	writeSuccessResponseEmpty(w, r)

}
|
|
|
|
|
|
|
|
func (s3a *S3ApiServer) genUploadsFolder(bucket string) string {
|
2018-09-20 13:03:16 +08:00
|
|
|
return fmt.Sprintf("%s/%s/.uploads", s3a.option.BucketsPath, bucket)
|
2018-09-04 02:38:10 +08:00
|
|
|
}
|
|
|
|
|
2022-04-12 11:04:38 +08:00
|
|
|
// Generate uploadID hash string from object
|
|
|
|
func (s3a *S3ApiServer) generateUploadID(object string) string {
|
|
|
|
if strings.HasPrefix(object, "/") {
|
|
|
|
object = object[1:]
|
2022-04-11 18:59:36 +08:00
|
|
|
}
|
2022-04-12 11:04:38 +08:00
|
|
|
h := sha1.New()
|
|
|
|
h.Write([]byte(object))
|
|
|
|
return fmt.Sprintf("%x", h.Sum(nil))
|
|
|
|
}
|
|
|
|
|
|
|
|
//Check object name and uploadID when processing multipart uploading
|
2022-04-12 13:29:50 +08:00
|
|
|
func (s3a *S3ApiServer) checkUploadId(object string, id string) error {
|
2022-04-11 18:59:36 +08:00
|
|
|
|
2022-04-12 11:04:38 +08:00
|
|
|
hash := s3a.generateUploadID(object)
|
|
|
|
if hash != id {
|
2022-04-11 18:59:36 +08:00
|
|
|
glog.Errorf("object %s and uploadID %s are not matched", object, id)
|
|
|
|
return fmt.Errorf("object %s and uploadID %s are not matched", object, id)
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2018-09-04 15:42:44 +08:00
|
|
|
// Parse bucket url queries for ?uploads
|
|
|
|
func getBucketMultipartResources(values url.Values) (prefix, keyMarker, uploadIDMarker, delimiter string, maxUploads int, encodingType string) {
|
|
|
|
prefix = values.Get("prefix")
|
|
|
|
keyMarker = values.Get("key-marker")
|
|
|
|
uploadIDMarker = values.Get("upload-id-marker")
|
|
|
|
delimiter = values.Get("delimiter")
|
|
|
|
if values.Get("max-uploads") != "" {
|
|
|
|
maxUploads, _ = strconv.Atoi(values.Get("max-uploads"))
|
|
|
|
} else {
|
|
|
|
maxUploads = maxUploadsList
|
|
|
|
}
|
|
|
|
encodingType = values.Get("encoding-type")
|
|
|
|
return
|
|
|
|
}
|
2018-09-04 02:38:10 +08:00
|
|
|
|
2018-09-04 15:42:44 +08:00
|
|
|
// Parse object url queries
|
|
|
|
func getObjectResources(values url.Values) (uploadID string, partNumberMarker, maxParts int, encodingType string) {
|
|
|
|
uploadID = values.Get("uploadId")
|
|
|
|
partNumberMarker, _ = strconv.Atoi(values.Get("part-number-marker"))
|
|
|
|
if values.Get("max-parts") != "" {
|
|
|
|
maxParts, _ = strconv.Atoi(values.Get("max-parts"))
|
|
|
|
} else {
|
|
|
|
maxParts = maxPartsList
|
|
|
|
}
|
|
|
|
encodingType = values.Get("encoding-type")
|
|
|
|
return
|
2018-09-04 02:38:10 +08:00
|
|
|
}
|
2018-09-04 15:42:44 +08:00
|
|
|
|
2022-03-23 16:05:14 +08:00
|
|
|
func xmlDecoder(body io.Reader, v interface{}, size int64) error {
|
|
|
|
var lbody io.Reader
|
|
|
|
if size > 0 {
|
|
|
|
lbody = io.LimitReader(body, size)
|
|
|
|
} else {
|
|
|
|
lbody = body
|
|
|
|
}
|
|
|
|
d := xml.NewDecoder(lbody)
|
|
|
|
d.CharsetReader = func(label string, input io.Reader) (io.Reader, error) {
|
|
|
|
return input, nil
|
|
|
|
}
|
|
|
|
return d.Decode(v)
|
|
|
|
}
|
2018-09-04 15:42:44 +08:00
|
|
|
|
2022-03-23 16:05:14 +08:00
|
|
|
// CompleteMultipartUpload is the XML request body of the
// CompleteMultipartUpload S3 API call: the list of uploaded parts.
type CompleteMultipartUpload struct {
	Parts []CompletedPart `xml:"Part"`
}
|
|
|
|
// CompletedPart is one <Part> entry of a CompleteMultipartUpload request:
// the ETag returned when the part was uploaded, and its part number.
type CompletedPart struct {
	ETag       string
	PartNumber int
}
|