2018-09-04 15:42:44 +08:00
|
|
|
package s3api
|
|
|
|
|
|
|
|
import (
|
2022-03-29 22:21:09 +08:00
|
|
|
"encoding/hex"
|
2019-02-27 16:21:37 +08:00
|
|
|
"encoding/xml"
|
2018-09-10 07:25:43 +08:00
|
|
|
"fmt"
|
2022-11-10 23:17:29 +08:00
|
|
|
"github.com/google/uuid"
|
2022-07-29 15:17:28 +08:00
|
|
|
"github.com/seaweedfs/seaweedfs/weed/s3api/s3err"
|
2022-04-18 10:35:43 +08:00
|
|
|
"golang.org/x/exp/slices"
|
2022-04-27 22:27:44 +08:00
|
|
|
"math"
|
2018-09-10 07:25:43 +08:00
|
|
|
"path/filepath"
|
2022-03-23 16:05:14 +08:00
|
|
|
"sort"
|
2018-09-10 07:26:11 +08:00
|
|
|
"strconv"
|
|
|
|
"strings"
|
|
|
|
"time"
|
2018-09-10 07:25:43 +08:00
|
|
|
|
2018-09-04 15:42:44 +08:00
|
|
|
"github.com/aws/aws-sdk-go/aws"
|
2018-09-10 07:26:11 +08:00
|
|
|
"github.com/aws/aws-sdk-go/service/s3"
|
2020-02-26 14:23:59 +08:00
|
|
|
|
2022-07-29 15:17:28 +08:00
|
|
|
"github.com/seaweedfs/seaweedfs/weed/filer"
|
|
|
|
"github.com/seaweedfs/seaweedfs/weed/glog"
|
|
|
|
"github.com/seaweedfs/seaweedfs/weed/pb/filer_pb"
|
2018-09-04 15:42:44 +08:00
|
|
|
)
|
|
|
|
|
2018-09-12 15:46:12 +08:00
|
|
|
// InitiateMultipartUploadResult is the XML response body for the S3
// CreateMultipartUpload (initiate) API, wrapping the AWS SDK output type
// under the S3 2006-03-01 XML namespace.
type InitiateMultipartUploadResult struct {
	XMLName xml.Name `xml:"http://s3.amazonaws.com/doc/2006-03-01/ InitiateMultipartUploadResult"`
	s3.CreateMultipartUploadOutput
}
|
|
|
|
|
2020-09-20 05:09:58 +08:00
|
|
|
func (s3a *S3ApiServer) createMultipartUpload(input *s3.CreateMultipartUploadInput) (output *InitiateMultipartUploadResult, code s3err.ErrorCode) {
|
2020-09-22 01:51:24 +08:00
|
|
|
|
|
|
|
glog.V(2).Infof("createMultipartUpload input %v", input)
|
|
|
|
|
2022-04-12 11:04:38 +08:00
|
|
|
uploadIdString := s3a.generateUploadID(*input.Key)
|
2018-09-04 15:42:44 +08:00
|
|
|
|
2022-11-15 22:33:36 +08:00
|
|
|
uploadIdString = uploadIdString + "_" + strings.ReplaceAll(uuid.New().String(), "-", "")
|
2022-11-10 23:17:29 +08:00
|
|
|
|
2020-02-26 14:23:59 +08:00
|
|
|
if err := s3a.mkdir(s3a.genUploadsFolder(*input.Bucket), uploadIdString, func(entry *filer_pb.Entry) {
|
2018-09-04 15:42:44 +08:00
|
|
|
if entry.Extended == nil {
|
2018-09-08 04:12:52 +08:00
|
|
|
entry.Extended = make(map[string][]byte)
|
2018-09-04 15:42:44 +08:00
|
|
|
}
|
2018-09-08 04:12:52 +08:00
|
|
|
entry.Extended["key"] = []byte(*input.Key)
|
2021-07-02 10:12:11 +08:00
|
|
|
for k, v := range input.Metadata {
|
|
|
|
entry.Extended[k] = []byte(*v)
|
|
|
|
}
|
2021-10-12 20:14:54 +08:00
|
|
|
if input.ContentType != nil {
|
|
|
|
entry.Attributes.Mime = *input.ContentType
|
|
|
|
}
|
2018-09-04 15:42:44 +08:00
|
|
|
}); err != nil {
|
|
|
|
glog.Errorf("NewMultipartUpload error: %v", err)
|
2020-09-20 05:09:58 +08:00
|
|
|
return nil, s3err.ErrInternalError
|
2018-09-04 15:42:44 +08:00
|
|
|
}
|
|
|
|
|
2018-09-12 15:46:12 +08:00
|
|
|
output = &InitiateMultipartUploadResult{
|
2019-02-27 16:21:37 +08:00
|
|
|
CreateMultipartUploadOutput: s3.CreateMultipartUploadOutput{
|
2018-09-12 15:46:12 +08:00
|
|
|
Bucket: input.Bucket,
|
2019-07-09 03:37:20 +08:00
|
|
|
Key: objectKey(input.Key),
|
2018-09-12 15:46:12 +08:00
|
|
|
UploadId: aws.String(uploadIdString),
|
|
|
|
},
|
2018-09-04 15:42:44 +08:00
|
|
|
}
|
|
|
|
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2018-09-12 15:46:12 +08:00
|
|
|
// CompleteMultipartUploadResult is the XML response body for the S3
// CompleteMultipartUpload API, wrapping the AWS SDK output type under the
// S3 2006-03-01 XML namespace.
type CompleteMultipartUploadResult struct {
	XMLName xml.Name `xml:"http://s3.amazonaws.com/doc/2006-03-01/ CompleteMultipartUploadResult"`
	s3.CompleteMultipartUploadOutput
}
|
|
|
|
|
2022-03-23 16:05:14 +08:00
|
|
|
// completeMultipartUpload assembles the uploaded part files of an upload
// into the final object entry, verifies the client-declared parts against
// what was actually received, and removes the upload folder afterwards.
// Error codes: ErrNoSuchUpload when the upload folder or its entry cannot
// be read, ErrInvalidPart when a declared part is missing or its ETag does
// not match the stored part, ErrInternalError when the final entry cannot
// be created.
func (s3a *S3ApiServer) completeMultipartUpload(input *s3.CompleteMultipartUploadInput, parts *CompleteMultipartUpload) (output *CompleteMultipartUploadResult, code s3err.ErrorCode) {

	glog.V(2).Infof("completeMultipartUpload input %v", input)

	// Sort the client-declared parts by part number; findByPartNumber below
	// relies on this ordering for its binary search.
	completedParts := parts.Parts
	slices.SortFunc(completedParts, func(a, b CompletedPart) bool {
		return a.PartNumber < b.PartNumber
	})

	uploadDirectory := s3a.genUploadsFolder(*input.Bucket) + "/" + *input.UploadId

	// List the received part files; an empty folder means no parts were uploaded.
	entries, _, err := s3a.list(uploadDirectory, "", "", false, maxPartsList)
	if err != nil || len(entries) == 0 {
		glog.Errorf("completeMultipartUpload %s %s error: %v, entries:%d", *input.Bucket, *input.UploadId, err, len(entries))
		return nil, s3err.ErrNoSuchUpload
	}

	// The upload folder's own entry carries the metadata recorded at initiate time.
	pentry, err := s3a.getEntry(s3a.genUploadsFolder(*input.Bucket), *input.UploadId)
	if err != nil {
		glog.Errorf("completeMultipartUpload %s %s error: %v", *input.Bucket, *input.UploadId, err)
		return nil, s3err.ErrNoSuchUpload
	}

	// check whether completedParts is more than received parts
	{
		// Collect the part numbers that actually exist as "%04d.part" files,
		// then reject the request if the client declared a part we never got.
		partNumbers := make(map[int]struct{}, len(entries))
		for _, entry := range entries {
			if strings.HasSuffix(entry.Name, ".part") && !entry.IsDirectory {
				partNumberString := entry.Name[:len(entry.Name)-len(".part")]
				partNumber, err := strconv.Atoi(partNumberString)
				if err == nil {
					partNumbers[partNumber] = struct{}{}
				}
			}
		}
		for _, part := range completedParts {
			if _, found := partNumbers[part.PartNumber]; !found {
				return nil, s3err.ErrInvalidPart
			}
		}
	}

	mime := pentry.Attributes.Mime

	// Concatenate the chunks of every declared part, in listing order,
	// rebasing each chunk's offset onto the final object's running offset.
	var finalParts []*filer_pb.FileChunk
	var offset int64

	for _, entry := range entries {
		if strings.HasSuffix(entry.Name, ".part") && !entry.IsDirectory {
			partETag, found := findByPartNumber(entry.Name, completedParts)
			if !found {
				// Part exists on disk but was not declared by the client: skip it.
				continue
			}
			// Only verify when the client sent a bare 32-hex-char md5 ETag;
			// other forms (e.g. quoted values) bypass the check.
			entryETag := hex.EncodeToString(entry.Attributes.GetMd5())
			if partETag != "" && len(partETag) == 32 && entryETag != "" && entryETag != partETag {
				glog.Errorf("completeMultipartUpload %s ETag mismatch chunk: %s part: %s", entry.Name, entryETag, partETag)
				return nil, s3err.ErrInvalidPart
			}
			for _, chunk := range entry.GetChunks() {
				p := &filer_pb.FileChunk{
					FileId:       chunk.GetFileIdString(),
					Offset:       offset,
					Size:         chunk.Size,
					ModifiedTsNs: chunk.ModifiedTsNs,
					CipherKey:    chunk.CipherKey,
					ETag:         chunk.ETag,
				}
				finalParts = append(finalParts, p)
				offset += int64(chunk.Size)
			}
		}
	}

	// Split the object key into directory and file name under the bucket path.
	entryName := filepath.Base(*input.Key)
	dirName := filepath.ToSlash(filepath.Dir(*input.Key))
	if dirName == "." {
		dirName = ""
	}
	if strings.HasPrefix(dirName, "/") {
		dirName = dirName[1:]
	}
	dirName = fmt.Sprintf("%s/%s/%s", s3a.option.BucketsPath, *input.Bucket, dirName)

	// remove suffix '/'
	if strings.HasSuffix(dirName, "/") {
		dirName = dirName[:len(dirName)-1]
	}

	// Create the final object entry, copying metadata from the upload folder
	// (everything except the internal "key" marker) and fixing the file size.
	err = s3a.mkFile(dirName, entryName, finalParts, func(entry *filer_pb.Entry) {
		if entry.Extended == nil {
			entry.Extended = make(map[string][]byte)
		}
		for k, v := range pentry.Extended {
			if k != "key" {
				entry.Extended[k] = v
			}
		}
		if pentry.Attributes.Mime != "" {
			entry.Attributes.Mime = pentry.Attributes.Mime
		} else if mime != "" {
			entry.Attributes.Mime = mime
		}
		entry.Attributes.FileSize = uint64(offset)
	})

	if err != nil {
		glog.Errorf("completeMultipartUpload %s/%s error: %v", dirName, entryName, err)
		return nil, s3err.ErrInternalError
	}

	output = &CompleteMultipartUploadResult{
		CompleteMultipartUploadOutput: s3.CompleteMultipartUploadOutput{
			Location: aws.String(fmt.Sprintf("http://%s%s/%s", s3a.option.Filer.ToHttpAddress(), urlEscapeObject(dirName), urlPathEscape(entryName))),
			Bucket:   input.Bucket,
			ETag:     aws.String("\"" + filer.ETagChunks(finalParts) + "\""),
			Key:      objectKey(input.Key),
		},
	}

	// Best-effort cleanup of the upload folder; a failure here is only logged
	// since the object itself was already created successfully.
	if err = s3a.rm(s3a.genUploadsFolder(*input.Bucket), *input.UploadId, false, true); err != nil {
		glog.V(1).Infof("completeMultipartUpload cleanup %s upload %s: %v", *input.Bucket, *input.UploadId, err)
	}

	return
}
|
|
|
|
|
2022-03-23 16:05:14 +08:00
|
|
|
func findByPartNumber(fileName string, parts []CompletedPart) (etag string, found bool) {
|
|
|
|
partNumber, formatErr := strconv.Atoi(fileName[:4])
|
|
|
|
if formatErr != nil {
|
|
|
|
return
|
|
|
|
}
|
2022-03-29 22:53:12 +08:00
|
|
|
x := sort.Search(len(parts), func(i int) bool {
|
|
|
|
return parts[i].PartNumber >= partNumber
|
|
|
|
})
|
2022-05-03 22:18:34 +08:00
|
|
|
if x >= len(parts) {
|
|
|
|
return
|
|
|
|
}
|
2022-03-29 22:53:12 +08:00
|
|
|
if parts[x].PartNumber != partNumber {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
y := 0
|
|
|
|
for i, part := range parts[x:] {
|
2022-03-29 22:21:09 +08:00
|
|
|
if part.PartNumber == partNumber {
|
2022-03-29 22:53:12 +08:00
|
|
|
y = i
|
|
|
|
} else {
|
|
|
|
break
|
2022-03-29 22:21:09 +08:00
|
|
|
}
|
|
|
|
}
|
2022-03-29 22:53:12 +08:00
|
|
|
return parts[x+y].ETag, true
|
2022-03-23 16:05:14 +08:00
|
|
|
}
|
|
|
|
|
2020-09-20 05:09:58 +08:00
|
|
|
func (s3a *S3ApiServer) abortMultipartUpload(input *s3.AbortMultipartUploadInput) (output *s3.AbortMultipartUploadOutput, code s3err.ErrorCode) {
|
2018-09-04 15:42:44 +08:00
|
|
|
|
2020-09-22 01:51:24 +08:00
|
|
|
glog.V(2).Infof("abortMultipartUpload input %v", input)
|
|
|
|
|
2020-02-26 14:23:59 +08:00
|
|
|
exists, err := s3a.exists(s3a.genUploadsFolder(*input.Bucket), *input.UploadId, true)
|
2018-09-04 15:42:44 +08:00
|
|
|
if err != nil {
|
2018-09-10 07:25:43 +08:00
|
|
|
glog.V(1).Infof("bucket %s abort upload %s: %v", *input.Bucket, *input.UploadId, err)
|
2020-09-20 05:09:58 +08:00
|
|
|
return nil, s3err.ErrNoSuchUpload
|
2018-09-04 15:42:44 +08:00
|
|
|
}
|
2018-09-10 07:25:43 +08:00
|
|
|
if exists {
|
2020-03-21 05:17:31 +08:00
|
|
|
err = s3a.rm(s3a.genUploadsFolder(*input.Bucket), *input.UploadId, true, true)
|
2018-09-10 07:25:43 +08:00
|
|
|
}
|
|
|
|
if err != nil {
|
|
|
|
glog.V(1).Infof("bucket %s remove upload %s: %v", *input.Bucket, *input.UploadId, err)
|
2020-09-20 05:09:58 +08:00
|
|
|
return nil, s3err.ErrInternalError
|
2018-09-10 07:25:43 +08:00
|
|
|
}
|
|
|
|
|
2020-09-20 05:09:58 +08:00
|
|
|
return &s3.AbortMultipartUploadOutput{}, s3err.ErrNone
|
2018-09-10 07:25:43 +08:00
|
|
|
}
|
|
|
|
|
2018-09-12 15:46:12 +08:00
|
|
|
// ListMultipartUploadsResult is the XML response body for the S3
// ListMultipartUploads API. The fields mirror s3.ListMultipartUploadsOutput,
// but the Uploads list is redeclared here so each element marshals as a
// flattened <Upload> element instead of the SDK's default nesting.
type ListMultipartUploadsResult struct {
	XMLName xml.Name `xml:"http://s3.amazonaws.com/doc/2006-03-01/ ListMultipartUploadsResult"`

	// copied from s3.ListMultipartUploadsOutput, the Uploads is not converting to <Upload></Upload>
	Bucket             *string               `type:"string"`
	Delimiter          *string               `type:"string"`
	EncodingType       *string               `type:"string" enum:"EncodingType"`
	IsTruncated        *bool                 `type:"boolean"`
	KeyMarker          *string               `type:"string"`
	MaxUploads         *int64                `type:"integer"`
	NextKeyMarker      *string               `type:"string"`
	NextUploadIdMarker *string               `type:"string"`
	Prefix             *string               `type:"string"`
	UploadIdMarker     *string               `type:"string"`
	Upload             []*s3.MultipartUpload `locationName:"Upload" type:"list" flattened:"true"`
}
|
|
|
|
|
2020-09-20 05:09:58 +08:00
|
|
|
func (s3a *S3ApiServer) listMultipartUploads(input *s3.ListMultipartUploadsInput) (output *ListMultipartUploadsResult, code s3err.ErrorCode) {
|
2020-09-12 06:04:01 +08:00
|
|
|
// https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListMultipartUploads.html
|
2018-09-10 07:25:43 +08:00
|
|
|
|
2020-09-22 01:51:24 +08:00
|
|
|
glog.V(2).Infof("listMultipartUploads input %v", input)
|
|
|
|
|
2018-09-12 15:46:12 +08:00
|
|
|
output = &ListMultipartUploadsResult{
|
2020-09-12 06:04:01 +08:00
|
|
|
Bucket: input.Bucket,
|
|
|
|
Delimiter: input.Delimiter,
|
|
|
|
EncodingType: input.EncodingType,
|
|
|
|
KeyMarker: input.KeyMarker,
|
|
|
|
MaxUploads: input.MaxUploads,
|
|
|
|
Prefix: input.Prefix,
|
2022-09-15 15:27:15 +08:00
|
|
|
IsTruncated: aws.Bool(false),
|
2018-09-04 15:42:44 +08:00
|
|
|
}
|
2018-09-10 07:25:43 +08:00
|
|
|
|
2022-04-27 22:27:44 +08:00
|
|
|
entries, _, err := s3a.list(s3a.genUploadsFolder(*input.Bucket), "", *input.UploadIdMarker, false, math.MaxInt32)
|
2018-09-10 07:25:43 +08:00
|
|
|
if err != nil {
|
|
|
|
glog.Errorf("listMultipartUploads %s error: %v", *input.Bucket, err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-04-27 22:27:44 +08:00
|
|
|
uploadsCount := int64(0)
|
2018-09-04 15:42:44 +08:00
|
|
|
for _, entry := range entries {
|
|
|
|
if entry.Extended != nil {
|
2020-09-22 01:51:24 +08:00
|
|
|
key := string(entry.Extended["key"])
|
|
|
|
if *input.KeyMarker != "" && *input.KeyMarker != key {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
if *input.Prefix != "" && !strings.HasPrefix(key, *input.Prefix) {
|
|
|
|
continue
|
|
|
|
}
|
2020-09-12 06:04:01 +08:00
|
|
|
output.Upload = append(output.Upload, &s3.MultipartUpload{
|
2020-09-22 01:51:24 +08:00
|
|
|
Key: objectKey(aws.String(key)),
|
2018-09-04 15:42:44 +08:00
|
|
|
UploadId: aws.String(entry.Name),
|
|
|
|
})
|
2022-04-27 22:27:44 +08:00
|
|
|
uploadsCount += 1
|
|
|
|
}
|
|
|
|
if uploadsCount >= *input.MaxUploads {
|
|
|
|
output.IsTruncated = aws.Bool(true)
|
|
|
|
output.NextUploadIdMarker = aws.String(entry.Name)
|
|
|
|
break
|
2018-09-04 15:42:44 +08:00
|
|
|
}
|
|
|
|
}
|
2018-09-12 16:00:51 +08:00
|
|
|
|
2018-09-04 15:42:44 +08:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2018-09-12 15:46:12 +08:00
|
|
|
// ListPartsResult is the XML response body for the S3 ListParts API. The
// fields mirror s3.ListPartsOutput, but the parts list is redeclared here so
// each element marshals as a flattened <Part> element instead of the SDK's
// default nesting.
type ListPartsResult struct {
	XMLName xml.Name `xml:"http://s3.amazonaws.com/doc/2006-03-01/ ListPartsResult"`

	// copied from s3.ListPartsOutput, the Parts is not converting to <Part></Part>
	Bucket               *string    `type:"string"`
	IsTruncated          *bool      `type:"boolean"`
	Key                  *string    `min:"1" type:"string"`
	MaxParts             *int64     `type:"integer"`
	NextPartNumberMarker *int64     `type:"integer"`
	PartNumberMarker     *int64     `type:"integer"`
	Part                 []*s3.Part `locationName:"Part" type:"list" flattened:"true"`
	StorageClass         *string    `type:"string" enum:"StorageClass"`
	UploadId             *string    `type:"string"`
}
|
|
|
|
|
2020-09-20 05:09:58 +08:00
|
|
|
func (s3a *S3ApiServer) listObjectParts(input *s3.ListPartsInput) (output *ListPartsResult, code s3err.ErrorCode) {
|
2020-09-12 06:07:19 +08:00
|
|
|
// https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListParts.html
|
|
|
|
|
2020-09-22 01:51:24 +08:00
|
|
|
glog.V(2).Infof("listObjectParts input %v", input)
|
|
|
|
|
2018-09-12 15:46:12 +08:00
|
|
|
output = &ListPartsResult{
|
2020-09-12 05:53:50 +08:00
|
|
|
Bucket: input.Bucket,
|
|
|
|
Key: objectKey(input.Key),
|
|
|
|
UploadId: input.UploadId,
|
|
|
|
MaxParts: input.MaxParts, // the maximum number of parts to return.
|
|
|
|
PartNumberMarker: input.PartNumberMarker, // the part number starts after this, exclusive
|
|
|
|
StorageClass: aws.String("STANDARD"),
|
2018-09-10 07:25:43 +08:00
|
|
|
}
|
|
|
|
|
2020-09-12 05:53:50 +08:00
|
|
|
entries, isLast, err := s3a.list(s3a.genUploadsFolder(*input.Bucket)+"/"+*input.UploadId, "", fmt.Sprintf("%04d.part", *input.PartNumberMarker), false, uint32(*input.MaxParts))
|
2018-09-10 07:25:43 +08:00
|
|
|
if err != nil {
|
2018-09-12 04:01:51 +08:00
|
|
|
glog.Errorf("listObjectParts %s %s error: %v", *input.Bucket, *input.UploadId, err)
|
2020-09-20 05:09:58 +08:00
|
|
|
return nil, s3err.ErrNoSuchUpload
|
2018-09-10 07:25:43 +08:00
|
|
|
}
|
|
|
|
|
2022-02-19 14:14:40 +08:00
|
|
|
// Note: The upload directory is sort of a marker of the existence of an multipart upload request.
|
|
|
|
// So can not just delete empty upload folders.
|
|
|
|
|
2020-09-12 05:53:50 +08:00
|
|
|
output.IsTruncated = aws.Bool(!isLast)
|
|
|
|
|
2018-09-10 07:25:43 +08:00
|
|
|
for _, entry := range entries {
|
|
|
|
if strings.HasSuffix(entry.Name, ".part") && !entry.IsDirectory {
|
|
|
|
partNumberString := entry.Name[:len(entry.Name)-len(".part")]
|
|
|
|
partNumber, err := strconv.Atoi(partNumberString)
|
|
|
|
if err != nil {
|
2018-09-12 04:01:51 +08:00
|
|
|
glog.Errorf("listObjectParts %s %s parse %s: %v", *input.Bucket, *input.UploadId, entry.Name, err)
|
2018-09-10 07:25:43 +08:00
|
|
|
continue
|
|
|
|
}
|
2020-09-12 05:53:50 +08:00
|
|
|
output.Part = append(output.Part, &s3.Part{
|
2018-09-10 07:25:43 +08:00
|
|
|
PartNumber: aws.Int64(int64(partNumber)),
|
2020-04-30 12:35:24 +08:00
|
|
|
LastModified: aws.Time(time.Unix(entry.Attributes.Mtime, 0).UTC()),
|
2020-09-01 15:21:19 +08:00
|
|
|
Size: aws.Int64(int64(filer.FileSize(entry))),
|
|
|
|
ETag: aws.String("\"" + filer.ETag(entry) + "\""),
|
2018-09-10 07:25:43 +08:00
|
|
|
})
|
2020-09-12 05:53:50 +08:00
|
|
|
if !isLast {
|
|
|
|
output.NextPartNumberMarker = aws.Int64(int64(partNumber))
|
|
|
|
}
|
2018-09-10 07:25:43 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-09-04 15:42:44 +08:00
|
|
|
return
|
|
|
|
}
|