package operation

import (
    "bytes"
    "encoding/json"
    "errors"
    "fmt"
    "io"
    "io/ioutil"
    "mime"
    "mime/multipart"
    "net/http"
    "net/textproto"
    "path/filepath"
    "strings"
    "time"

    "github.com/chrislusf/seaweedfs/weed/glog"
    "github.com/chrislusf/seaweedfs/weed/pb/filer_pb"
    "github.com/chrislusf/seaweedfs/weed/security"
    "github.com/chrislusf/seaweedfs/weed/util"
)
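
// UploadResult describes the outcome of uploading a single blob to a volume server.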
type UploadResult struct {
    Name       string `json:"name,omitempty"`
    Size       uint32 `json:"size,omitempty"`
    Error      string `json:"error,omitempty"`
    ETag       string `json:"eTag,omitempty"`
    CipherKey  []byte `json:"cipherKey,omitempty"`
    Mime       string `json:"mime,omitempty"`
    Gzip       uint32 `json:"gzip,omitempty"`
    ContentMd5 string `json:"contentMd5,omitempty"`
}
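
// ToPbFileChunk converts the upload result into a filer_pb.FileChunk located at the given offset.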
func (uploadResult *UploadResult) ToPbFileChunk(fileId string, offset int64) *filer_pb.FileChunk {
    return &filer_pb.FileChunk{
        FileId:       fileId,
        Offset:       offset,
        Size:         uint64(uploadResult.Size),
        Mtime:        time.Now().UnixNano(),
        ETag:         uploadResult.ETag,
        CipherKey:    uploadResult.CipherKey,
        IsCompressed: uploadResult.Gzip > 0,
    }
}

// HTTPClient is the subset of http.Client used here, so that tests can substitute a stub implementation.
type HTTPClient interface {
    Do(req *http.Request) (*http.Response, error)
}

var (
    HttpClient HTTPClient
)

func init() {
    HttpClient = &http.Client{Transport: &http.Transport{
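        // raise the per-host idle connection cap well above the net/http default of 2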
        MaxIdleConnsPerHost: 1024,
    }}
}
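
// fileNameEscaper escapes backslashes and double quotes so the file name can sit inside the quoted Content-Disposition value.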
var fileNameEscaper = strings.NewReplacer("\\", "\\\\", "\"", "\\\"")

// UploadData sends a POST request to a volume server to upload the content with adjustable compression level
func UploadData(uploadUrl string, filename string, cipher bool, data []byte, isInputCompressed bool, mtype string, pairMap map[string]string, jwt security.EncodedJwt) (uploadResult *UploadResult, err error) {
    uploadResult, err = doUploadData(uploadUrl, filename, cipher, data, isInputCompressed, mtype, pairMap, jwt)
    return
}

// Upload sends a POST request to a volume server to upload the content with fast compression
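//
// A minimal usage sketch (the URL, file name, and content below are illustrative only):
//
//	result, err, data := Upload("http://localhost:8080/3,01637037d6", "hello.txt", false, strings.NewReader("hello world"), false, "text/plain", nil, "")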
func Upload(uploadUrl string, filename string, cipher bool, reader io.Reader, isInputCompressed bool, mtype string, pairMap map[string]string, jwt security.EncodedJwt) (uploadResult *UploadResult, err error, data []byte) {
    uploadResult, err, data = doUpload(uploadUrl, filename, cipher, reader, isInputCompressed, mtype, pairMap, jwt)
    return
}
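
// doUpload buffers the whole reader in memory and delegates to doUploadData; the buffered bytes are also returned so callers can reuse them.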
func doUpload(uploadUrl string, filename string, cipher bool, reader io.Reader, isInputCompressed bool, mtype string, pairMap map[string]string, jwt security.EncodedJwt) (uploadResult *UploadResult, err error, data []byte) {
    data, err = ioutil.ReadAll(reader)
    if err != nil {
        err = fmt.Errorf("read input: %v", err)
        return
    }
    uploadResult, uploadErr := doUploadData(uploadUrl, filename, cipher, data, isInputCompressed, mtype, pairMap, jwt)
    return uploadResult, uploadErr, data
}
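
// doUploadData decides whether to gzip the payload, optionally encrypts it, and then hands it to upload_content.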
func doUploadData(uploadUrl string, filename string, cipher bool, data []byte, isInputCompressed bool, mtype string, pairMap map[string]string, jwt security.EncodedJwt) (uploadResult *UploadResult, err error) {
    contentIsGzipped := isInputCompressed
    shouldGzipNow := false
    if !isInputCompressed {
        if mtype == "" {
            mtype = http.DetectContentType(data)
            // println("detect1 mimetype to", mtype)
            if mtype == "application/octet-stream" {
                mtype = ""
            }
        }
        if shouldBeCompressed, iAmSure := util.IsCompressableFileType(filepath.Base(filename), mtype); iAmSure && shouldBeCompressed {
            shouldGzipNow = true
        } else if !iAmSure && mtype == "" && len(data) > 128 {
            var compressed []byte
            compressed, err = util.GzipData(data[0:128])
            shouldGzipNow = len(compressed)*10 < 128*9 // only gzip when the 128-byte sample compresses to under 90% of its size
        }
    }

    var clearDataLen int

    // gzip if possible
    // this could be double copying
    clearDataLen = len(data)
    if shouldGzipNow {
        compressed, compressErr := util.GzipData(data)
        // fmt.Printf("data is compressed from %d ==> %d\n", len(data), len(compressed))
        if compressErr == nil {
            data = compressed
            contentIsGzipped = true
        }
    } else if isInputCompressed {
        // just to get the clear data length
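        // note: err below is a new local that shadows the named return, so a decompression failure is ignored and clearDataLen keeps the compressed length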
        clearData, err := util.DecompressData(data)
        if err == nil {
            clearDataLen = len(clearData)
        }
    }

    if cipher {
        // encrypt(gzip(data))

        // encrypt
        cipherKey := util.GenCipherKey()
        encryptedData, encryptionErr := util.Encrypt(data, cipherKey)
        if encryptionErr != nil {
            err = fmt.Errorf("encrypt input: %v", encryptionErr)
            return
        }

        // upload data
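        // the file name, mime type, and pair map are not sent with encrypted content; name and mime are kept client side on the returned UploadResult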
        uploadResult, err = upload_content(uploadUrl, func(w io.Writer) (err error) {
            _, err = w.Write(encryptedData)
            return
        }, "", false, len(encryptedData), "", nil, jwt)
        if uploadResult != nil {
            uploadResult.Name = filename
            uploadResult.Mime = mtype
            uploadResult.CipherKey = cipherKey
        }
    } else {
        // upload data
        uploadResult, err = upload_content(uploadUrl, func(w io.Writer) (err error) {
            _, err = w.Write(data)
            return
        }, filename, contentIsGzipped, 0, mtype, pairMap, jwt)
    }

    if uploadResult == nil {
        return
    }

    uploadResult.Size = uint32(clearDataLen)
    if contentIsGzipped {
        uploadResult.Gzip = 1
    }

    return uploadResult, err
}
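
// upload_content builds a multipart/form-data body with fillBufferFunction, POSTs it to uploadUrl, and decodes the volume server's JSON response into an UploadResult.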
func upload_content(uploadUrl string, fillBufferFunction func(w io.Writer) error, filename string, isGzipped bool, originalDataSize int, mtype string, pairMap map[string]string, jwt security.EncodedJwt) (*UploadResult, error) {
    body_buf := bytes.NewBufferString("")
    body_writer := multipart.NewWriter(body_buf)
    h := make(textproto.MIMEHeader)
    h.Set("Content-Disposition", fmt.Sprintf(`form-data; name="file"; filename="%s"`, fileNameEscaper.Replace(filename)))
    if mtype == "" {
        mtype = mime.TypeByExtension(strings.ToLower(filepath.Ext(filename)))
    }
    if mtype != "" {
        h.Set("Content-Type", mtype)
    }
    if isGzipped {
        h.Set("Content-Encoding", "gzip")
    }

    file_writer, cp_err := body_writer.CreatePart(h)
    if cp_err != nil {
        glog.V(0).Infoln("error creating form file", cp_err.Error())
        return nil, cp_err
    }
    if err := fillBufferFunction(file_writer); err != nil {
        glog.V(0).Infoln("error copying data", err)
        return nil, err
    }
    content_type := body_writer.FormDataContentType()
    if err := body_writer.Close(); err != nil {
        glog.V(0).Infoln("error closing body", err)
        return nil, err
    }

    req, postErr := http.NewRequest("POST", uploadUrl, body_buf)
    if postErr != nil {
        glog.V(1).Infof("failing to upload to %s: %v", uploadUrl, postErr)
        return nil, fmt.Errorf("failing to upload to %s: %v", uploadUrl, postErr)
    }
    req.Header.Set("Content-Type", content_type)
    for k, v := range pairMap {
        req.Header.Set(k, v)
    }
    if jwt != "" {
        req.Header.Set("Authorization", "BEARER "+string(jwt))
    }
    resp, post_err := HttpClient.Do(req)
    if post_err != nil {
        glog.V(1).Infof("failing to upload to %v: %v", uploadUrl, post_err)
        return nil, fmt.Errorf("failing to upload to %v: %v", uploadUrl, post_err)
    }
    defer resp.Body.Close()

    var ret UploadResult
    etag := getEtag(resp)
    if resp.StatusCode == http.StatusNoContent {
        ret.ETag = etag
        return &ret, nil
    }
    resp_body, ra_err := ioutil.ReadAll(resp.Body)
    if ra_err != nil {
        return nil, ra_err
    }
    unmarshal_err := json.Unmarshal(resp_body, &ret)
    if unmarshal_err != nil {
        glog.V(0).Infoln("failing to read upload response", uploadUrl, string(resp_body))
        return nil, unmarshal_err
    }
    if ret.Error != "" {
        return nil, errors.New(ret.Error)
    }
    ret.ETag = etag
    ret.ContentMd5 = resp.Header.Get("Content-MD5")
    return &ret, nil
}
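
// getEtag returns the response's ETag header with any surrounding double quotes stripped.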
func getEtag(r *http.Response) (etag string) {
    etag = r.Header.Get("ETag")
    if strings.HasPrefix(etag, "\"") && strings.HasSuffix(etag, "\"") {
        etag = etag[1 : len(etag)-1]
    }
    return
}