package storage

import (
	"encoding/hex"
	"errors"
	"fmt"
	"io/ioutil"
	"mime"
	"net/http"
	"path"
	"strconv"
	"strings"
	"time"

	"github.com/chrislusf/seaweedfs/go/glog"
	"github.com/chrislusf/seaweedfs/go/images"
	"github.com/chrislusf/seaweedfs/go/operation"
	"github.com/chrislusf/seaweedfs/go/util"
)
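
// On-disk layout constants: a stored needle is a 16-byte header
// (Cookie 4B + Id 8B + Size 4B), followed by the body, a CRC32 checksum, and
// padding that aligns every record to NeedlePaddingSize (8) bytes.
// MaxPossibleVolumeSize is 4Gi * 8 = 32GiB, matching a 4-byte needle offset
// counted in NeedlePaddingSize units.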
const (
	NeedleHeaderSize      = 16 //should never change this
	NeedlePaddingSize     = 8
	NeedleChecksumSize    = 4
	MaxPossibleVolumeSize = 4 * 1024 * 1024 * 1024 * 8
)

// A Needle represents an uploaded and stored file.
// Needle file size is limited to 4GB for now.
type Needle struct {
	Cookie uint32 `comment:"random number to mitigate brute force lookups"`
	Id     uint64 `comment:"needle id"`
	Size   uint32 `comment:"sum of DataSize,Data,NameSize,Name,MimeSize,Mime"`

	DataSize     uint32 `comment:"Data size"` //version2
	Data         []byte `comment:"The actual file data"`
	Flags        byte   `comment:"boolean flags"` //version2
	NameSize     uint8  //version2
	Name         []byte `comment:"maximum 256 characters"` //version2
	MimeSize     uint8  //version2
	Mime         []byte `comment:"maximum 256 characters"` //version2
	LastModified uint64 //only store LastModifiedBytesLength bytes, which is 5 bytes to disk
	Ttl          *TTL

	Checksum CRC    `comment:"CRC32 to check integrity"`
	Padding  []byte `comment:"Aligned to 8 bytes"`
}

func (n *Needle) String() (str string) {
	str = fmt.Sprintf("Cookie:%d, Id:%d, Size:%d, DataSize:%d, Name: %s, Mime: %s", n.Cookie, n.Id, n.Size, n.DataSize, n.Name, n.Mime)
	return
}
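
// ParseUpload reads the multipart request body and returns the uploaded file's
// name, bytes, mime type and gzip flag, plus the "ts" (modified time), "ttl"
// and "cm" (chunk manifest) form values. Compressible content is gzipped
// before being returned.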
func ParseUpload(r *http.Request) (
	fileName string, data []byte, mimeType string, isGzipped bool,
	modifiedTime uint64, ttl *TTL, isChunkedFile bool, e error) {
	form, fe := r.MultipartReader()
	if fe != nil {
		glog.V(0).Infoln("MultipartReader [ERROR]", fe)
		e = fe
		return
	}

	//first multi-part item
	part, fe := form.NextPart()
	if fe != nil {
		glog.V(0).Infoln("Reading Multi part [ERROR]", fe)
		e = fe
		return
	}

	fileName = part.FileName()
	if fileName != "" {
		fileName = path.Base(fileName)
	}

	data, e = ioutil.ReadAll(part)
	if e != nil {
		glog.V(0).Infoln("Reading Content [ERROR]", e)
		return
	}

	//if the filename is empty string, do a search on the other multi-part items
	for fileName == "" {
		part2, fe := form.NextPart()
		if fe != nil {
			break // no more parts, or an error; just stop searching
		}

		fName := part2.FileName()

		//use the first multi-part item that carries a filename
		if fName != "" {
			data2, fe2 := ioutil.ReadAll(part2)
			if fe2 != nil {
				glog.V(0).Infoln("Reading Content [ERROR]", fe2)
				e = fe2
				return
			}

			//replace the data read from the first item
			data = data2
			fileName = path.Base(fName)
			break
		}
	}

	dotIndex := strings.LastIndex(fileName, ".")
	ext, mtype := "", ""
	if dotIndex > 0 {
		ext = strings.ToLower(fileName[dotIndex:])
		mtype = mime.TypeByExtension(ext)
	}
	contentType := part.Header.Get("Content-Type")
	if contentType != "" && mtype != contentType {
		mimeType = contentType //only return the mime type if it cannot be deduced from the file extension
		mtype = contentType
	}
	if part.Header.Get("Content-Encoding") == "gzip" {
		isGzipped = true
	} else if operation.IsGzippable(ext, mtype) {
		if data, e = operation.GzipData(data); e != nil {
			return
		}
		isGzipped = true
	}
	if ext == ".gz" {
		isGzipped = true
	}
	if strings.HasSuffix(fileName, ".gz") &&
		!strings.HasSuffix(fileName, ".tar.gz") {
		fileName = fileName[:len(fileName)-3]
	}
	modifiedTime, _ = strconv.ParseUint(r.FormValue("ts"), 10, 64)
	ttl, _ = ReadTTL(r.FormValue("ttl"))
	isChunkedFile, _ = strconv.ParseBool(r.FormValue("cm"))
	return
}
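
// NewNeedle builds a Needle from an upload request: it parses the multipart
// body via ParseUpload, fills in the name, mime, gzip, last-modified, TTL and
// chunk-manifest flags, optionally fixes JPEG orientation, computes the CRC32
// checksum, and parses the file id from the request URL path.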
func NewNeedle(r *http.Request, fixJpgOrientation bool) (n *Needle, e error) {
	fname, mimeType, isGzipped, isChunkedFile := "", "", false, false
	n = new(Needle)
	fname, n.Data, mimeType, isGzipped, n.LastModified, n.Ttl, isChunkedFile, e = ParseUpload(r)
	if e != nil {
		return
	}
	if len(fname) < 256 {
		n.Name = []byte(fname)
		n.SetHasName()
	}
	if len(mimeType) < 256 {
		n.Mime = []byte(mimeType)
		n.SetHasMime()
	}
	if isGzipped {
		n.SetGzipped()
	}
	if n.LastModified == 0 {
		n.LastModified = uint64(time.Now().Unix())
	}
	n.SetHasLastModifiedDate()
	if n.Ttl != EMPTY_TTL {
		n.SetHasTtl()
	}

	if isChunkedFile {
		n.SetIsChunkManifest()
	}

	if fixJpgOrientation {
		loweredName := strings.ToLower(fname)
		if mimeType == "image/jpeg" || strings.HasSuffix(loweredName, ".jpg") || strings.HasSuffix(loweredName, ".jpeg") {
			n.Data = images.FixJpgOrientation(n.Data)
		}
	}

	n.Checksum = NewCRC(n.Data)

	//take the fid from the URL path: the part after the last comma, dropping any ".ext" suffix
	commaSep := strings.LastIndex(r.URL.Path, ",")
	dotSep := strings.LastIndex(r.URL.Path, ".")
	fid := r.URL.Path[commaSep+1:]
	if dotSep > 0 {
		fid = r.URL.Path[commaSep+1 : dotSep]
	}

	e = n.ParsePath(fid)

	return
}
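
// ParsePath sets the needle Id and Cookie from a file id string: hex-encoded
// <key><cookie>, optionally followed by "_<delta>", in which case delta is
// added to the needle id. For example (illustrative values), "e7b1f37d65_2"
// yields Id 0xe7 + 2 and Cookie 0xb1f37d65.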
func (n *Needle) ParsePath(fid string) (err error) {
	length := len(fid)
	if length <= 8 {
		return errors.New("Invalid fid:" + fid)
	}
	delta := ""
	deltaIndex := strings.LastIndex(fid, "_")
	if deltaIndex > 0 {
		fid, delta = fid[0:deltaIndex], fid[deltaIndex+1:]
	}
	n.Id, n.Cookie, err = ParseKeyHash(fid)
	if err != nil {
		return err
	}
	if delta != "" {
		if d, e := strconv.ParseUint(delta, 10, 64); e == nil {
			n.Id += d
		} else {
			return e
		}
	}
	return err
}
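
// ParseKeyHash decodes a hex file id into the needle key (id) and the 4-byte
// cookie: the last 4 decoded bytes are the cookie, everything before them is
// the key. Odd-length input is left-padded with a "0" before decoding.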
func ParseKeyHash(key_hash_string string) (uint64, uint32, error) {
	if len(key_hash_string)%2 == 1 {
		key_hash_string = "0" + key_hash_string
	}
	key_hash_bytes, khe := hex.DecodeString(key_hash_string)
	key_hash_len := len(key_hash_bytes)
	if khe != nil || key_hash_len <= 4 {
		glog.V(0).Infoln("Invalid key_hash", key_hash_string, "length:", key_hash_len, "error", khe)
		return 0, 0, errors.New("Invalid key and hash:" + key_hash_string)
	}
	key := util.BytesToUint64(key_hash_bytes[0 : key_hash_len-4])
	hash := util.BytesToUint32(key_hash_bytes[key_hash_len-4 : key_hash_len])
	return key, hash, nil
}