// seaweedfs/weed/storage/needle/needle.go
package needle
import (
	"encoding/json"
	"fmt"
	"io"
	"io/ioutil"
	"net/http"
	"strconv"
	"strings"
	"time"

	"github.com/chrislusf/seaweedfs/weed/images"
	. "github.com/chrislusf/seaweedfs/weed/storage/types"
)
const (
	// NeedleChecksumSize is the number of bytes the CRC32 checksum
	// occupies in the on-disk needle layout.
	NeedleChecksumSize = 4
	// PairNamePrefix marks HTTP request headers that are carried over
	// into the needle as extra name/value pairs.
	PairNamePrefix = "Seaweed-"
)
/*
* A Needle means a uploaded and stored file.
* Needle file size is limited to 4GB for now.
*/
type Needle struct {
Cookie Cookie `comment:"random number to mitigate brute force lookups"`
Id NeedleId `comment:"needle id"`
Size uint32 `comment:"sum of DataSize,Data,NameSize,Name,MimeSize,Mime"`
DataSize uint32 `comment:"Data size"` //version2
Data []byte `comment:"The actual file data"`
2019-01-17 09:17:19 +08:00
Flags byte `comment:"boolean flags"` //version2
NameSize uint8 //version2
Name []byte `comment:"maximum 256 characters"` //version2
2019-01-17 09:17:19 +08:00
MimeSize uint8 //version2
Mime []byte `comment:"maximum 256 characters"` //version2
2019-01-17 09:17:19 +08:00
PairsSize uint16 //version2
Pairs []byte `comment:"additional name value pairs, json format, maximum 64kB"`
LastModified uint64 //only store LastModifiedBytesLength bytes, which is 5 bytes to disk
Ttl *TTL
2018-07-24 16:36:04 +08:00
Checksum CRC `comment:"CRC32 to check integrity"`
AppendAtNs uint64 `comment:"append timestamp in nano seconds"` //version3
Padding []byte `comment:"Aligned to 8 bytes"`
}
func (n *Needle) String() (str string) {
2018-07-22 08:41:21 +08:00
str = fmt.Sprintf("%s Size:%d, DataSize:%d, Name:%s, Mime:%s", formatNeedleIdCookie(n.Id, n.Cookie), n.Size, n.DataSize, n.Name, n.Mime)
return
}
2020-01-03 16:37:24 +08:00
func ParseUpload(r *http.Request, sizeLimit int64) (
fileName string, data []byte, mimeType string, pairMap map[string]string, isGzipped bool, originalDataSize int,
2015-12-15 14:38:58 +08:00
modifiedTime uint64, ttl *TTL, isChunkedFile bool, e error) {
pairMap = make(map[string]string)
for k, v := range r.Header {
if len(v) > 0 && strings.HasPrefix(k, PairNamePrefix) {
pairMap[k] = v[0]
}
}
2018-07-22 06:58:48 +08:00
if r.Method == "POST" {
2020-01-03 16:37:24 +08:00
fileName, data, mimeType, isGzipped, originalDataSize, isChunkedFile, e = parseMultipart(r, sizeLimit)
2018-07-22 06:58:48 +08:00
} else {
isGzipped = false
mimeType = r.Header.Get("Content-Type")
fileName = ""
2020-01-03 16:37:24 +08:00
data, e = ioutil.ReadAll(io.LimitReader(r.Body, sizeLimit+1))
originalDataSize = len(data)
2020-01-03 16:37:24 +08:00
if e == io.EOF || int64(originalDataSize) == sizeLimit+1 {
io.Copy(ioutil.Discard, r.Body)
}
r.Body.Close()
2018-07-22 06:58:48 +08:00
}
if e != nil {
2013-08-06 04:37:41 +08:00
return
}
2015-01-16 17:30:23 +08:00
modifiedTime, _ = strconv.ParseUint(r.FormValue("ts"), 10, 64)
ttl, _ = ReadTTL(r.FormValue("ttl"))
return
}
2020-01-03 16:37:24 +08:00
func CreateNeedleFromRequest(r *http.Request, fixJpgOrientation bool, sizeLimit int64) (n *Needle, originalSize int, e error) {
var pairMap map[string]string
fname, mimeType, isGzipped, isChunkedFile := "", "", false, false
n = new(Needle)
2020-01-03 16:37:24 +08:00
fname, n.Data, mimeType, pairMap, isGzipped, originalSize, n.LastModified, n.Ttl, isChunkedFile, e = ParseUpload(r, sizeLimit)
if e != nil {
return
}
if len(fname) < 256 {
n.Name = []byte(fname)
n.SetHasName()
}
if len(mimeType) < 256 {
n.Mime = []byte(mimeType)
n.SetHasMime()
}
if len(pairMap) != 0 {
trimmedPairMap := make(map[string]string)
for k, v := range pairMap {
trimmedPairMap[k[len(PairNamePrefix):]] = v
}
pairs, _ := json.Marshal(trimmedPairMap)
if len(pairs) < 65536 {
n.Pairs = pairs
n.PairsSize = uint16(len(pairs))
n.SetHasPairs()
}
}
if isGzipped {
n.SetGzipped()
}
if n.LastModified == 0 {
n.LastModified = uint64(time.Now().Unix())
}
n.SetHasLastModifiedDate()
2014-09-21 11:51:24 +08:00
if n.Ttl != EMPTY_TTL {
n.SetHasTtl()
}
if isChunkedFile {
2015-12-15 14:38:58 +08:00
n.SetIsChunkManifest()
}
2014-05-16 16:10:46 +08:00
if fixJpgOrientation {
loweredName := strings.ToLower(fname)
if mimeType == "image/jpeg" || strings.HasSuffix(loweredName, ".jpg") || strings.HasSuffix(loweredName, ".jpeg") {
n.Data = images.FixJpgOrientation(n.Data)
}
}
n.Checksum = NewCRC(n.Data)
commaSep := strings.LastIndex(r.URL.Path, ",")
dotSep := strings.LastIndex(r.URL.Path, ".")
fid := r.URL.Path[commaSep+1:]
if dotSep > 0 {
2018-07-24 16:38:08 +08:00
fid = r.URL.Path[commaSep+1 : dotSep]
}
e = n.ParsePath(fid)
return
}
func (n *Needle) ParsePath(fid string) (err error) {
length := len(fid)
if length <= CookieSize*2 {
2016-04-10 16:50:58 +08:00
return fmt.Errorf("Invalid fid: %s", fid)
}
delta := ""
deltaIndex := strings.LastIndex(fid, "_")
if deltaIndex > 0 {
fid, delta = fid[0:deltaIndex], fid[deltaIndex+1:]
}
n.Id, n.Cookie, err = ParseNeedleIdCookie(fid)
if err != nil {
return err
}
if delta != "" {
if d, e := strconv.ParseUint(delta, 10, 64); e == nil {
2019-06-21 16:14:10 +08:00
n.Id += Uint64ToNeedleId(d)
} else {
return e
}
}
return err
}
func ParseNeedleIdCookie(key_hash_string string) (NeedleId, Cookie, error) {
if len(key_hash_string) <= CookieSize*2 {
2018-08-01 14:25:26 +08:00
return NeedleIdEmpty, 0, fmt.Errorf("KeyHash is too short.")
2012-12-21 16:36:55 +08:00
}
if len(key_hash_string) > (NeedleIdSize+CookieSize)*2 {
2018-08-01 14:25:26 +08:00
return NeedleIdEmpty, 0, fmt.Errorf("KeyHash is too long.")
2016-04-10 15:54:40 +08:00
}
split := len(key_hash_string) - CookieSize*2
needleId, err := ParseNeedleId(key_hash_string[:split])
2016-04-10 15:54:40 +08:00
if err != nil {
2018-08-01 14:25:26 +08:00
return NeedleIdEmpty, 0, fmt.Errorf("Parse needleId error: %v", err)
2016-04-10 15:54:40 +08:00
}
cookie, err := ParseCookie(key_hash_string[split:])
2016-04-10 15:54:40 +08:00
if err != nil {
2018-08-01 14:25:26 +08:00
return NeedleIdEmpty, 0, fmt.Errorf("Parse cookie error: %v", err)
2016-04-10 15:54:40 +08:00
}
return needleId, cookie, nil
2016-04-10 15:54:40 +08:00
}
// LastModifiedString renders the needle's LastModified field (seconds
// since the Unix epoch) in "2006-01-02T15:04:05" layout.
func (n *Needle) LastModifiedString() string {
	const layout = "2006-01-02T15:04:05"
	t := time.Unix(int64(n.LastModified), 0)
	return t.Format(layout)
}