mutantmonkey
6 years ago
15 changed files with 414 additions and 365 deletions
-
145backends/localfs/localfs.go
-
5backends/meta.go
-
70backends/metajson/metajson.go
-
105backends/s3/s3.go
-
19backends/storage.go
-
19delete.go
-
27display.go
-
2expiry.go
-
37fileserve.go
-
70helpers/archive.go
-
67helpers/helpers.go
-
165meta.go
-
22server.go
-
4torrent.go
-
22upload.go
@ -1,70 +0,0 @@ |
|||
package metajson |
|||
|
|||
import ( |
|||
"bytes" |
|||
"encoding/json" |
|||
"time" |
|||
|
|||
"github.com/andreimarcu/linx-server/backends" |
|||
) |
|||
|
|||
// MetadataJSON is the serialized (on-storage) JSON form of a file's
// metadata. It mirrors backends.Metadata field-for-field, except that
// Expiry is stored as a Unix timestamp (seconds) rather than time.Time.
type MetadataJSON struct {
	DeleteKey    string   `json:"delete_key"`
	Sha256sum    string   `json:"sha256sum"`
	Mimetype     string   `json:"mimetype"`
	Size         int64    `json:"size"`
	Expiry       int64    `json:"expiry"`
	ArchiveFiles []string `json:"archive_files,omitempty"`
}
|||
|
|||
// MetaJSONBackend persists file metadata as JSON blobs in an
// underlying metadata storage backend.
type MetaJSONBackend struct {
	storage backends.MetaStorageBackend
}
|||
|
|||
func (m MetaJSONBackend) Put(key string, metadata *backends.Metadata) error { |
|||
mjson := MetadataJSON{} |
|||
mjson.DeleteKey = metadata.DeleteKey |
|||
mjson.Mimetype = metadata.Mimetype |
|||
mjson.ArchiveFiles = metadata.ArchiveFiles |
|||
mjson.Sha256sum = metadata.Sha256sum |
|||
mjson.Expiry = metadata.Expiry.Unix() |
|||
mjson.Size = metadata.Size |
|||
|
|||
byt, err := json.Marshal(mjson) |
|||
if err != nil { |
|||
return err |
|||
} |
|||
|
|||
if _, err := m.storage.Put(key, bytes.NewBuffer(byt)); err != nil { |
|||
return err |
|||
} |
|||
|
|||
return nil |
|||
} |
|||
|
|||
func (m MetaJSONBackend) Get(key string) (metadata backends.Metadata, err error) { |
|||
b, err := m.storage.Get(key) |
|||
if err != nil { |
|||
return metadata, backends.BadMetadata |
|||
} |
|||
|
|||
mjson := MetadataJSON{} |
|||
|
|||
err = json.Unmarshal(b, &mjson) |
|||
if err != nil { |
|||
return metadata, backends.BadMetadata |
|||
} |
|||
|
|||
metadata.DeleteKey = mjson.DeleteKey |
|||
metadata.Mimetype = mjson.Mimetype |
|||
metadata.ArchiveFiles = mjson.ArchiveFiles |
|||
metadata.Sha256sum = mjson.Sha256sum |
|||
metadata.Expiry = time.Unix(mjson.Expiry, 0) |
|||
metadata.Size = mjson.Size |
|||
|
|||
return |
|||
} |
|||
|
|||
func NewMetaJSONBackend(storage backends.MetaStorageBackend) MetaJSONBackend { |
|||
return MetaJSONBackend{storage: storage} |
|||
} |
@ -0,0 +1,70 @@ |
|||
package helpers |
|||
|
|||
import ( |
|||
"archive/tar" |
|||
"archive/zip" |
|||
"compress/bzip2" |
|||
"compress/gzip" |
|||
"io" |
|||
"sort" |
|||
) |
|||
|
|||
type ReadSeekerAt interface { |
|||
io.Reader |
|||
io.Seeker |
|||
io.ReaderAt |
|||
} |
|||
|
|||
func ListArchiveFiles(mimetype string, size int64, r ReadSeekerAt) (files []string, err error) { |
|||
if mimetype == "application/x-tar" { |
|||
tReadr := tar.NewReader(r) |
|||
for { |
|||
hdr, err := tReadr.Next() |
|||
if err == io.EOF || err != nil { |
|||
break |
|||
} |
|||
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg { |
|||
files = append(files, hdr.Name) |
|||
} |
|||
} |
|||
sort.Strings(files) |
|||
} else if mimetype == "application/x-gzip" { |
|||
gzf, err := gzip.NewReader(r) |
|||
if err == nil { |
|||
tReadr := tar.NewReader(gzf) |
|||
for { |
|||
hdr, err := tReadr.Next() |
|||
if err == io.EOF || err != nil { |
|||
break |
|||
} |
|||
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg { |
|||
files = append(files, hdr.Name) |
|||
} |
|||
} |
|||
sort.Strings(files) |
|||
} |
|||
} else if mimetype == "application/x-bzip" { |
|||
bzf := bzip2.NewReader(r) |
|||
tReadr := tar.NewReader(bzf) |
|||
for { |
|||
hdr, err := tReadr.Next() |
|||
if err == io.EOF || err != nil { |
|||
break |
|||
} |
|||
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg { |
|||
files = append(files, hdr.Name) |
|||
} |
|||
} |
|||
sort.Strings(files) |
|||
} else if mimetype == "application/zip" { |
|||
zf, err := zip.NewReader(r, size) |
|||
if err == nil { |
|||
for _, f := range zf.File { |
|||
files = append(files, f.Name) |
|||
} |
|||
} |
|||
sort.Strings(files) |
|||
} |
|||
|
|||
return |
|||
} |
@ -0,0 +1,67 @@ |
|||
package helpers |
|||
|
|||
import ( |
|||
"crypto/sha256" |
|||
"encoding/hex" |
|||
"io" |
|||
"unicode" |
|||
|
|||
"gopkg.in/h2non/filetype.v1" |
|||
) |
|||
|
|||
func DetectMime(r io.ReadSeeker) (string, error) { |
|||
// Get first 512 bytes for mimetype detection
|
|||
header := make([]byte, 512) |
|||
|
|||
r.Seek(0, 0) |
|||
r.Read(header) |
|||
r.Seek(0, 0) |
|||
|
|||
kind, err := filetype.Match(header) |
|||
if err != nil { |
|||
return "application/octet-stream", err |
|||
} else if kind.MIME.Value != "" { |
|||
return kind.MIME.Value, nil |
|||
} |
|||
|
|||
// Check if the file seems anything like text
|
|||
if printable(header) { |
|||
return "text/plain", nil |
|||
} else { |
|||
return "application/octet-stream", nil |
|||
} |
|||
} |
|||
|
|||
func Sha256sum(r io.ReadSeeker) (string, error) { |
|||
hasher := sha256.New() |
|||
|
|||
r.Seek(0, 0) |
|||
_, err := io.Copy(hasher, r) |
|||
if err != nil { |
|||
return "", err |
|||
} |
|||
|
|||
r.Seek(0, 0) |
|||
|
|||
return hex.EncodeToString(hasher.Sum(nil)), nil |
|||
} |
|||
|
|||
// printable reports whether data looks like plain ASCII text. A NUL
// byte at the very start marks the content as binary; NUL bytes at any
// later position are tolerated (this keeps zero-padded sniff buffers,
// like the 512-byte header used by DetectMime, classified as text).
// Any byte above the ASCII range marks the content as non-text.
func printable(data []byte) bool {
	for i, b := range data {
		r := rune(b)

		// A null terminator at the beginning of the file means binary.
		// (The previous `r == 0 && i < 0` branch was unreachable — the
		// range index is never negative — and has been removed.)
		if r == 0 && i == 0 {
			return false
		}

		if r > unicode.MaxASCII {
			return false
		}
	}

	return true
}
@ -1,165 +0,0 @@ |
|||
package main |
|||
|
|||
import ( |
|||
"archive/tar" |
|||
"archive/zip" |
|||
"compress/bzip2" |
|||
"compress/gzip" |
|||
"crypto/sha256" |
|||
"encoding/hex" |
|||
"errors" |
|||
"io" |
|||
"sort" |
|||
"time" |
|||
"unicode" |
|||
|
|||
"github.com/andreimarcu/linx-server/backends" |
|||
"github.com/andreimarcu/linx-server/expiry" |
|||
"github.com/dchest/uniuri" |
|||
"gopkg.in/h2non/filetype.v1" |
|||
) |
|||
|
|||
var NotFoundErr = errors.New("File not found.") |
|||
|
|||
func generateMetadata(fName string, exp time.Time, delKey string) (m backends.Metadata, err error) { |
|||
file, err := fileBackend.Open(fName) |
|||
if err != nil { |
|||
return |
|||
} |
|||
defer file.Close() |
|||
|
|||
m.Size, err = fileBackend.Size(fName) |
|||
if err != nil { |
|||
return |
|||
} |
|||
|
|||
m.Expiry = exp |
|||
|
|||
if delKey == "" { |
|||
m.DeleteKey = uniuri.NewLen(30) |
|||
} else { |
|||
m.DeleteKey = delKey |
|||
} |
|||
|
|||
// Get first 512 bytes for mimetype detection
|
|||
header := make([]byte, 512) |
|||
file.Read(header) |
|||
|
|||
kind, err := filetype.Match(header) |
|||
if err != nil { |
|||
m.Mimetype = "application/octet-stream" |
|||
} else { |
|||
m.Mimetype = kind.MIME.Value |
|||
} |
|||
|
|||
if m.Mimetype == "" { |
|||
// Check if the file seems anything like text
|
|||
if printable(header) { |
|||
m.Mimetype = "text/plain" |
|||
} else { |
|||
m.Mimetype = "application/octet-stream" |
|||
} |
|||
} |
|||
|
|||
// Compute the sha256sum
|
|||
hasher := sha256.New() |
|||
file.Seek(0, 0) |
|||
_, err = io.Copy(hasher, file) |
|||
if err == nil { |
|||
m.Sha256sum = hex.EncodeToString(hasher.Sum(nil)) |
|||
} |
|||
file.Seek(0, 0) |
|||
|
|||
// If archive, grab list of filenames
|
|||
if m.Mimetype == "application/x-tar" { |
|||
tReadr := tar.NewReader(file) |
|||
for { |
|||
hdr, err := tReadr.Next() |
|||
if err == io.EOF || err != nil { |
|||
break |
|||
} |
|||
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg { |
|||
m.ArchiveFiles = append(m.ArchiveFiles, hdr.Name) |
|||
} |
|||
} |
|||
sort.Strings(m.ArchiveFiles) |
|||
} else if m.Mimetype == "application/x-gzip" { |
|||
gzf, err := gzip.NewReader(file) |
|||
if err == nil { |
|||
tReadr := tar.NewReader(gzf) |
|||
for { |
|||
hdr, err := tReadr.Next() |
|||
if err == io.EOF || err != nil { |
|||
break |
|||
} |
|||
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg { |
|||
m.ArchiveFiles = append(m.ArchiveFiles, hdr.Name) |
|||
} |
|||
} |
|||
sort.Strings(m.ArchiveFiles) |
|||
} |
|||
} else if m.Mimetype == "application/x-bzip" { |
|||
bzf := bzip2.NewReader(file) |
|||
tReadr := tar.NewReader(bzf) |
|||
for { |
|||
hdr, err := tReadr.Next() |
|||
if err == io.EOF || err != nil { |
|||
break |
|||
} |
|||
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg { |
|||
m.ArchiveFiles = append(m.ArchiveFiles, hdr.Name) |
|||
} |
|||
} |
|||
sort.Strings(m.ArchiveFiles) |
|||
} else if m.Mimetype == "application/zip" { |
|||
zf, err := zip.NewReader(file, m.Size) |
|||
if err == nil { |
|||
for _, f := range zf.File { |
|||
m.ArchiveFiles = append(m.ArchiveFiles, f.Name) |
|||
} |
|||
} |
|||
sort.Strings(m.ArchiveFiles) |
|||
} |
|||
|
|||
return |
|||
} |
|||
|
|||
// metadataWrite persists metadata for filename via the configured
// metadata backend.
func metadataWrite(filename string, metadata *backends.Metadata) error {
	return metaBackend.Put(filename, metadata)
}
|||
|
|||
func metadataRead(filename string) (metadata backends.Metadata, err error) { |
|||
metadata, err = metaBackend.Get(filename) |
|||
if err != nil { |
|||
// Metadata does not exist, generate one
|
|||
newMData, err := generateMetadata(filename, expiry.NeverExpire, "") |
|||
if err != nil { |
|||
return metadata, err |
|||
} |
|||
metadataWrite(filename, &newMData) |
|||
|
|||
metadata, err = metaBackend.Get(filename) |
|||
} |
|||
|
|||
return |
|||
} |
|||
|
|||
// printable reports whether data looks like plain ASCII text. A NUL
// byte at the very start marks the content as binary; NUL bytes at any
// later position are tolerated (this keeps the zero-padded 512-byte
// sniff buffer used by generateMetadata classified as text). Any byte
// above the ASCII range marks the content as non-text.
func printable(data []byte) bool {
	for i, b := range data {
		r := rune(b)

		// A null terminator at the beginning of the file means binary.
		// (The previous `r == 0 && i < 0` branch was unreachable — the
		// range index is never negative — and has been removed.)
		if r == 0 && i == 0 {
			return false
		}

		if r > unicode.MaxASCII {
			return false
		}
	}

	return true
}
Write
Preview
Loading…
Cancel
Save
Reference in new issue