package storage

import (
	"encoding/json"
	"fmt"
	"io/ioutil"
	"math"
	"mime"
	"net/http"
	"path"
	"strconv"
	"strings"
	"time"

	"github.com/chrislusf/seaweedfs/weed/glog"
	"github.com/chrislusf/seaweedfs/weed/images"
	"github.com/chrislusf/seaweedfs/weed/operation"
)
const (
	NeedleHeaderSize      = 16 //should never change this
	NeedlePaddingSize     = 8
	NeedleChecksumSize    = 4
	MaxPossibleVolumeSize = 4 * 1024 * 1024 * 1024 * 8
	TombstoneFileSize     = math.MaxUint32
	PairNamePrefix        = "Seaweed-"
)
/*
 * A Needle represents an uploaded and stored file.
 * Needle file size is limited to 4GB for now.
 */
type Needle struct {
	Cookie uint32 `comment:"random number to mitigate brute force lookups"`
	Id     uint64 `comment:"needle id"`
	Size   uint32 `comment:"sum of DataSize,Data,NameSize,Name,MimeSize,Mime"`

	DataSize     uint32 `comment:"Data size"` //version2
	Data         []byte `comment:"The actual file data"`
	Flags        byte   `comment:"boolean flags"` //version2
	NameSize     uint8  //version2
	Name         []byte `comment:"maximum 256 characters"` //version2
	MimeSize     uint8  //version2
	Mime         []byte `comment:"maximum 256 characters"` //version2
	PairsSize    uint16 //version2
	Pairs        []byte `comment:"additional name value pairs, json format, maximum 64kB"`
	LastModified uint64 //only LastModifiedBytesLength bytes (5 bytes) are stored to disk
	Ttl          *TTL

	Checksum CRC    `comment:"CRC32 to check integrity"`
	Padding  []byte `comment:"Aligned to 8 bytes"`

	rawBlock *Block // underlying supporting []byte, fetched from and released back into a pool
}
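// String returns a human-readable summary of the needle's header fields.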
func (n *Needle) String() (str string) {
	str = fmt.Sprintf("Cookie:%d, Id:%d, Size:%d, DataSize:%d, Name: %s, Mime: %s", n.Cookie, n.Id, n.Size, n.DataSize, n.Name, n.Mime)
	return
}
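// ParseUpload reads the first file part of a multipart upload request and
// returns the file name, raw data, detected mime type, any "Seaweed-" prefixed
// header pairs, whether the data is (or has been) gzipped, the client-supplied
// modification time ("ts"), the TTL ("ttl"), and whether this upload is a
// chunk manifest ("cm").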
func ParseUpload(r *http.Request) (
	fileName string, data []byte, mimeType string, pairMap map[string]string, isGzipped bool,
	modifiedTime uint64, ttl *TTL, isChunkedFile bool, e error) {
	pairMap = make(map[string]string)
	for k, v := range r.Header {
		if len(v) > 0 && strings.HasPrefix(k, PairNamePrefix) {
			pairMap[k] = v[0]
		}
	}

	form, fe := r.MultipartReader()
	if fe != nil {
		glog.V(0).Infoln("MultipartReader [ERROR]", fe)
		e = fe
		return
	}

	//first multi-part item
	part, fe := form.NextPart()
	if fe != nil {
		glog.V(0).Infoln("Reading Multi part [ERROR]", fe)
		e = fe
		return
	}

	fileName = part.FileName()
	if fileName != "" {
		fileName = path.Base(fileName)
	}

	data, e = ioutil.ReadAll(part)
	if e != nil {
		glog.V(0).Infoln("Reading Content [ERROR]", e)
		return
	}

	//if the filename is empty, search the remaining multi-part items
	for fileName == "" {
		part2, fe := form.NextPart()
		if fe != nil {
			break // no more parts, or an error; just break safely
		}

		fName := part2.FileName()

		//use the first multi-part item that has a filename
		if fName != "" {
			data2, fe2 := ioutil.ReadAll(part2)
			if fe2 != nil {
				glog.V(0).Infoln("Reading Content [ERROR]", fe2)
				e = fe2
				return
			}

			//update
			data = data2
			fileName = path.Base(fName)
			break
		}
	}

	isChunkedFile, _ = strconv.ParseBool(r.FormValue("cm"))

	if !isChunkedFile {
		dotIndex := strings.LastIndex(fileName, ".")
		ext, mtype := "", ""
		if dotIndex > 0 {
			ext = strings.ToLower(fileName[dotIndex:])
			mtype = mime.TypeByExtension(ext)
		}
		contentType := part.Header.Get("Content-Type")
		if contentType != "" && mtype != contentType {
			mimeType = contentType //only return the mime type if it cannot be deduced from the file extension
			mtype = contentType
		}

		if part.Header.Get("Content-Encoding") == "gzip" {
			isGzipped = true
		} else if operation.IsGzippable(ext, mtype) {
			if data, e = operation.GzipData(data); e != nil {
				return
			}
			isGzipped = true
		}
		if ext == ".gz" {
			isGzipped = true
		}
		if strings.HasSuffix(fileName, ".gz") &&
			!strings.HasSuffix(fileName, ".tar.gz") {
			fileName = fileName[:len(fileName)-3]
		}
	}
	modifiedTime, _ = strconv.ParseUint(r.FormValue("ts"), 10, 64)
	ttl, _ = ReadTTL(r.FormValue("ttl"))
	return
}
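// NewNeedle builds a Needle from an upload request: it parses the multipart
// body, sets the name/mime/pairs/gzip/TTL/chunk-manifest flags, optionally
// fixes JPEG orientation, computes the CRC32 checksum, and parses the file id
// from the request path (the part after the last comma, before any extension).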
func NewNeedle(r *http.Request, fixJpgOrientation bool) (n *Needle, e error) {
	var pairMap map[string]string
	fname, mimeType, isGzipped, isChunkedFile := "", "", false, false
	n = new(Needle)
	fname, n.Data, mimeType, pairMap, isGzipped, n.LastModified, n.Ttl, isChunkedFile, e = ParseUpload(r)
	if e != nil {
		return
	}
	if len(fname) < 256 {
		n.Name = []byte(fname)
		n.SetHasName()
	}
	if len(mimeType) < 256 {
		n.Mime = []byte(mimeType)
		n.SetHasMime()
	}
	if len(pairMap) != 0 {
		trimmedPairMap := make(map[string]string)
		for k, v := range pairMap {
			trimmedPairMap[k[len(PairNamePrefix):]] = v
		}

		pairs, _ := json.Marshal(trimmedPairMap)
		if len(pairs) < 65536 {
			n.Pairs = pairs
			n.PairsSize = uint16(len(pairs))
			n.SetHasPairs()
		}
	}
	if isGzipped {
		n.SetGzipped()
	}
	if n.LastModified == 0 {
		n.LastModified = uint64(time.Now().Unix())
	}
	n.SetHasLastModifiedDate()
	if n.Ttl != EMPTY_TTL {
		n.SetHasTtl()
	}

	if isChunkedFile {
		n.SetIsChunkManifest()
	}

	if fixJpgOrientation {
		loweredName := strings.ToLower(fname)
		if mimeType == "image/jpeg" || strings.HasSuffix(loweredName, ".jpg") || strings.HasSuffix(loweredName, ".jpeg") {
			n.Data = images.FixJpgOrientation(n.Data)
		}
	}

	n.Checksum = NewCRC(n.Data)

	commaSep := strings.LastIndex(r.URL.Path, ",")
	dotSep := strings.LastIndex(r.URL.Path, ".")
	fid := r.URL.Path[commaSep+1:]
	if dotSep > 0 {
		fid = r.URL.Path[commaSep+1 : dotSep]
	}

	e = n.ParsePath(fid)

	return
}
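// ParsePath parses a file id of the form "<key hex><cookie hex>[_<delta>]"
// into the needle's Id and Cookie; a trailing "_<delta>" suffix is parsed as a
// decimal offset and added to the Id.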
func (n *Needle) ParsePath(fid string) (err error) {
	length := len(fid)
	if length <= 8 {
		return fmt.Errorf("Invalid fid: %s", fid)
	}
	delta := ""
	deltaIndex := strings.LastIndex(fid, "_")
	if deltaIndex > 0 {
		fid, delta = fid[0:deltaIndex], fid[deltaIndex+1:]
	}
	n.Id, n.Cookie, err = ParseKeyHash(fid)
	if err != nil {
		return err
	}
	if delta != "" {
		if d, e := strconv.ParseUint(delta, 10, 64); e == nil {
			n.Id += d
		} else {
			return e
		}
	}
	return err
}
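// ParseKeyHash splits a hex key+cookie string: the last 8 hex characters are
// the 32-bit cookie (hash) and the leading characters are the 64-bit key.
// For example, "01637037d6" yields key 0x01 and cookie 0x637037d6.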
func ParseKeyHash(key_hash_string string) (uint64, uint32, error) {
	if len(key_hash_string) <= 8 {
		return 0, 0, fmt.Errorf("KeyHash is too short.")
	}
	if len(key_hash_string) > 24 {
		return 0, 0, fmt.Errorf("KeyHash is too long.")
	}
	split := len(key_hash_string) - 8
	key, err := strconv.ParseUint(key_hash_string[:split], 16, 64)
	if err != nil {
		return 0, 0, fmt.Errorf("Parse key error: %v", err)
	}
	hash, err := strconv.ParseUint(key_hash_string[split:], 16, 32)
	if err != nil {
		return 0, 0, fmt.Errorf("Parse hash error: %v", err)
	}
	return key, uint32(hash), nil
}