Commit authored by mutantmonkey 6 years ago, committed by Andrei Marcu.
21 changed files with 737 additions and 440 deletions.
-
2Dockerfile
-
141backends/localfs/localfs.go
-
5backends/meta.go
-
70backends/metajson/metajson.go
-
192backends/s3/s3.go
-
20backends/storage.go
-
19delete.go
-
32display.go
-
2expiry.go
-
37fileserve.go
-
70helpers/archive.go
-
67helpers/helpers.go
-
10linx-cleanup/cleanup.go
-
165meta.go
-
29pages.go
-
22server.go
-
80server_test.go
-
64torrent.go
-
28torrent/torrent.go
-
5torrent_test.go
-
95upload.go
@ -1,70 +0,0 @@ |
|||
package metajson |
|||
|
|||
import ( |
|||
"bytes" |
|||
"encoding/json" |
|||
"time" |
|||
|
|||
"github.com/andreimarcu/linx-server/backends" |
|||
) |
|||
|
|||
type MetadataJSON struct { |
|||
DeleteKey string `json:"delete_key"` |
|||
Sha256sum string `json:"sha256sum"` |
|||
Mimetype string `json:"mimetype"` |
|||
Size int64 `json:"size"` |
|||
Expiry int64 `json:"expiry"` |
|||
ArchiveFiles []string `json:"archive_files,omitempty"` |
|||
} |
|||
|
|||
type MetaJSONBackend struct { |
|||
storage backends.MetaStorageBackend |
|||
} |
|||
|
|||
func (m MetaJSONBackend) Put(key string, metadata *backends.Metadata) error { |
|||
mjson := MetadataJSON{} |
|||
mjson.DeleteKey = metadata.DeleteKey |
|||
mjson.Mimetype = metadata.Mimetype |
|||
mjson.ArchiveFiles = metadata.ArchiveFiles |
|||
mjson.Sha256sum = metadata.Sha256sum |
|||
mjson.Expiry = metadata.Expiry.Unix() |
|||
mjson.Size = metadata.Size |
|||
|
|||
byt, err := json.Marshal(mjson) |
|||
if err != nil { |
|||
return err |
|||
} |
|||
|
|||
if _, err := m.storage.Put(key, bytes.NewBuffer(byt)); err != nil { |
|||
return err |
|||
} |
|||
|
|||
return nil |
|||
} |
|||
|
|||
func (m MetaJSONBackend) Get(key string) (metadata backends.Metadata, err error) { |
|||
b, err := m.storage.Get(key) |
|||
if err != nil { |
|||
return metadata, backends.BadMetadata |
|||
} |
|||
|
|||
mjson := MetadataJSON{} |
|||
|
|||
err = json.Unmarshal(b, &mjson) |
|||
if err != nil { |
|||
return metadata, backends.BadMetadata |
|||
} |
|||
|
|||
metadata.DeleteKey = mjson.DeleteKey |
|||
metadata.Mimetype = mjson.Mimetype |
|||
metadata.ArchiveFiles = mjson.ArchiveFiles |
|||
metadata.Sha256sum = mjson.Sha256sum |
|||
metadata.Expiry = time.Unix(mjson.Expiry, 0) |
|||
metadata.Size = mjson.Size |
|||
|
|||
return |
|||
} |
|||
|
|||
func NewMetaJSONBackend(storage backends.MetaStorageBackend) MetaJSONBackend { |
|||
return MetaJSONBackend{storage: storage} |
|||
} |
@ -0,0 +1,192 @@ |
|||
package s3 |
|||
|
|||
import ( |
|||
"io" |
|||
"io/ioutil" |
|||
"os" |
|||
"strconv" |
|||
"time" |
|||
|
|||
"github.com/andreimarcu/linx-server/backends" |
|||
"github.com/andreimarcu/linx-server/helpers" |
|||
"github.com/aws/aws-sdk-go/aws" |
|||
"github.com/aws/aws-sdk-go/aws/awserr" |
|||
"github.com/aws/aws-sdk-go/aws/session" |
|||
"github.com/aws/aws-sdk-go/service/s3" |
|||
"github.com/aws/aws-sdk-go/service/s3/s3manager" |
|||
) |
|||
|
|||
type S3Backend struct { |
|||
bucket string |
|||
svc *s3.S3 |
|||
} |
|||
|
|||
func (b S3Backend) Delete(key string) error { |
|||
_, err := b.svc.DeleteObject(&s3.DeleteObjectInput{ |
|||
Bucket: aws.String(b.bucket), |
|||
Key: aws.String(key), |
|||
}) |
|||
if err != nil { |
|||
return err |
|||
} |
|||
return nil |
|||
} |
|||
|
|||
func (b S3Backend) Exists(key string) (bool, error) { |
|||
_, err := b.svc.HeadObject(&s3.HeadObjectInput{ |
|||
Bucket: aws.String(b.bucket), |
|||
Key: aws.String(key), |
|||
}) |
|||
return err == nil, err |
|||
} |
|||
|
|||
func (b S3Backend) Head(key string) (metadata backends.Metadata, err error) { |
|||
var result *s3.HeadObjectOutput |
|||
result, err = b.svc.HeadObject(&s3.HeadObjectInput{ |
|||
Bucket: aws.String(b.bucket), |
|||
Key: aws.String(key), |
|||
}) |
|||
if err != nil { |
|||
if aerr, ok := err.(awserr.Error); ok { |
|||
if aerr.Code() == s3.ErrCodeNoSuchKey || aerr.Code() == "NotFound" { |
|||
err = backends.NotFoundErr |
|||
} |
|||
} |
|||
return |
|||
} |
|||
|
|||
metadata, err = unmapMetadata(result.Metadata) |
|||
return |
|||
} |
|||
|
|||
func (b S3Backend) Get(key string) (metadata backends.Metadata, r io.ReadCloser, err error) { |
|||
var result *s3.GetObjectOutput |
|||
result, err = b.svc.GetObject(&s3.GetObjectInput{ |
|||
Bucket: aws.String(b.bucket), |
|||
Key: aws.String(key), |
|||
}) |
|||
if err != nil { |
|||
if aerr, ok := err.(awserr.Error); ok { |
|||
if aerr.Code() == s3.ErrCodeNoSuchKey || aerr.Code() == "NotFound" { |
|||
err = backends.NotFoundErr |
|||
} |
|||
} |
|||
return |
|||
} |
|||
|
|||
metadata, err = unmapMetadata(result.Metadata) |
|||
r = result.Body |
|||
return |
|||
} |
|||
|
|||
func mapMetadata(m backends.Metadata) map[string]*string { |
|||
return map[string]*string{ |
|||
"Expiry": aws.String(strconv.FormatInt(m.Expiry.Unix(), 10)), |
|||
"Delete_key": aws.String(m.DeleteKey), |
|||
"Size": aws.String(strconv.FormatInt(m.Size, 10)), |
|||
"Mimetype": aws.String(m.Mimetype), |
|||
"Sha256sum": aws.String(m.Sha256sum), |
|||
} |
|||
} |
|||
|
|||
func unmapMetadata(input map[string]*string) (m backends.Metadata, err error) { |
|||
expiry, err := strconv.ParseInt(aws.StringValue(input["Expiry"]), 10, 64) |
|||
if err != nil { |
|||
return m, err |
|||
} |
|||
m.Expiry = time.Unix(expiry, 0) |
|||
|
|||
m.Size, err = strconv.ParseInt(aws.StringValue(input["Size"]), 10, 64) |
|||
if err != nil { |
|||
return |
|||
} |
|||
|
|||
m.DeleteKey = aws.StringValue(input["Delete_key"]) |
|||
m.Mimetype = aws.StringValue(input["Mimetype"]) |
|||
m.Sha256sum = aws.StringValue(input["Sha256sum"]) |
|||
return |
|||
} |
|||
|
|||
func (b S3Backend) Put(key string, r io.Reader, expiry time.Time, deleteKey string) (m backends.Metadata, err error) { |
|||
tmpDst, err := ioutil.TempFile("", "linx-server-upload") |
|||
if err != nil { |
|||
return m, err |
|||
} |
|||
defer tmpDst.Close() |
|||
defer os.Remove(tmpDst.Name()) |
|||
|
|||
bytes, err := io.Copy(tmpDst, r) |
|||
if bytes == 0 { |
|||
return m, backends.FileEmptyError |
|||
} else if err != nil { |
|||
return m, err |
|||
} |
|||
|
|||
m.Expiry = expiry |
|||
m.DeleteKey = deleteKey |
|||
m.Size = bytes |
|||
m.Mimetype, _ = helpers.DetectMime(tmpDst) |
|||
m.Sha256sum, _ = helpers.Sha256sum(tmpDst) |
|||
// XXX: we may not be able to write this to AWS easily
|
|||
//m.ArchiveFiles, _ = helpers.ListArchiveFiles(m.Mimetype, m.Size, tmpDst)
|
|||
|
|||
uploader := s3manager.NewUploaderWithClient(b.svc) |
|||
input := &s3manager.UploadInput{ |
|||
Bucket: aws.String(b.bucket), |
|||
Key: aws.String(key), |
|||
Body: tmpDst, |
|||
Metadata: mapMetadata(m), |
|||
} |
|||
_, err = uploader.Upload(input) |
|||
if err != nil { |
|||
return |
|||
} |
|||
|
|||
return |
|||
} |
|||
|
|||
func (b S3Backend) Size(key string) (int64, error) { |
|||
input := &s3.HeadObjectInput{ |
|||
Bucket: aws.String(b.bucket), |
|||
Key: aws.String(key), |
|||
} |
|||
result, err := b.svc.HeadObject(input) |
|||
if err != nil { |
|||
return 0, err |
|||
} |
|||
|
|||
return *result.ContentLength, nil |
|||
} |
|||
|
|||
func (b S3Backend) List() ([]string, error) { |
|||
var output []string |
|||
input := &s3.ListObjectsInput{ |
|||
Bucket: aws.String(b.bucket), |
|||
} |
|||
|
|||
results, err := b.svc.ListObjects(input) |
|||
if err != nil { |
|||
return nil, err |
|||
} |
|||
|
|||
|
|||
for _, object := range results.Contents { |
|||
output = append(output, *object.Key) |
|||
} |
|||
|
|||
return output, nil |
|||
} |
|||
|
|||
func NewS3Backend(bucket string, region string, endpoint string) S3Backend { |
|||
awsConfig := &aws.Config{} |
|||
if region != "" { |
|||
awsConfig.Region = aws.String(region) |
|||
} |
|||
if endpoint != "" { |
|||
awsConfig.Endpoint = aws.String(endpoint) |
|||
} |
|||
|
|||
sess := session.Must(session.NewSession(awsConfig)) |
|||
svc := s3.New(sess) |
|||
return S3Backend{bucket: bucket, svc: svc} |
|||
} |
@ -0,0 +1,70 @@ |
|||
package helpers |
|||
|
|||
import ( |
|||
"archive/tar" |
|||
"archive/zip" |
|||
"compress/bzip2" |
|||
"compress/gzip" |
|||
"io" |
|||
"sort" |
|||
) |
|||
|
|||
type ReadSeekerAt interface { |
|||
io.Reader |
|||
io.Seeker |
|||
io.ReaderAt |
|||
} |
|||
|
|||
func ListArchiveFiles(mimetype string, size int64, r ReadSeekerAt) (files []string, err error) { |
|||
if mimetype == "application/x-tar" { |
|||
tReadr := tar.NewReader(r) |
|||
for { |
|||
hdr, err := tReadr.Next() |
|||
if err == io.EOF || err != nil { |
|||
break |
|||
} |
|||
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg { |
|||
files = append(files, hdr.Name) |
|||
} |
|||
} |
|||
sort.Strings(files) |
|||
} else if mimetype == "application/x-gzip" { |
|||
gzf, err := gzip.NewReader(r) |
|||
if err == nil { |
|||
tReadr := tar.NewReader(gzf) |
|||
for { |
|||
hdr, err := tReadr.Next() |
|||
if err == io.EOF || err != nil { |
|||
break |
|||
} |
|||
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg { |
|||
files = append(files, hdr.Name) |
|||
} |
|||
} |
|||
sort.Strings(files) |
|||
} |
|||
} else if mimetype == "application/x-bzip" { |
|||
bzf := bzip2.NewReader(r) |
|||
tReadr := tar.NewReader(bzf) |
|||
for { |
|||
hdr, err := tReadr.Next() |
|||
if err == io.EOF || err != nil { |
|||
break |
|||
} |
|||
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg { |
|||
files = append(files, hdr.Name) |
|||
} |
|||
} |
|||
sort.Strings(files) |
|||
} else if mimetype == "application/zip" { |
|||
zf, err := zip.NewReader(r, size) |
|||
if err == nil { |
|||
for _, f := range zf.File { |
|||
files = append(files, f.Name) |
|||
} |
|||
} |
|||
sort.Strings(files) |
|||
} |
|||
|
|||
return |
|||
} |
@ -0,0 +1,67 @@ |
|||
package helpers |
|||
|
|||
import ( |
|||
"encoding/hex" |
|||
"io" |
|||
"unicode" |
|||
|
|||
"github.com/minio/sha256-simd" |
|||
"gopkg.in/h2non/filetype.v1" |
|||
) |
|||
|
|||
func DetectMime(r io.ReadSeeker) (string, error) { |
|||
// Get first 512 bytes for mimetype detection
|
|||
header := make([]byte, 512) |
|||
|
|||
r.Seek(0, 0) |
|||
r.Read(header) |
|||
r.Seek(0, 0) |
|||
|
|||
kind, err := filetype.Match(header) |
|||
if err != nil { |
|||
return "application/octet-stream", err |
|||
} else if kind.MIME.Value != "" { |
|||
return kind.MIME.Value, nil |
|||
} |
|||
|
|||
// Check if the file seems anything like text
|
|||
if printable(header) { |
|||
return "text/plain", nil |
|||
} else { |
|||
return "application/octet-stream", nil |
|||
} |
|||
} |
|||
|
|||
func Sha256sum(r io.ReadSeeker) (string, error) { |
|||
hasher := sha256.New() |
|||
|
|||
r.Seek(0, 0) |
|||
_, err := io.Copy(hasher, r) |
|||
if err != nil { |
|||
return "", err |
|||
} |
|||
|
|||
r.Seek(0, 0) |
|||
|
|||
return hex.EncodeToString(hasher.Sum(nil)), nil |
|||
} |
|||
|
|||
func printable(data []byte) bool { |
|||
for i, b := range data { |
|||
r := rune(b) |
|||
|
|||
// A null terminator that's not at the beginning of the file
|
|||
if r == 0 && i == 0 { |
|||
return false |
|||
} else if r == 0 && i < 0 { |
|||
continue |
|||
} |
|||
|
|||
if r > unicode.MaxASCII { |
|||
return false |
|||
} |
|||
|
|||
} |
|||
|
|||
return true |
|||
} |
@ -1,165 +0,0 @@ |
|||
package main |
|||
|
|||
import ( |
|||
"archive/tar" |
|||
"archive/zip" |
|||
"compress/bzip2" |
|||
"compress/gzip" |
|||
"encoding/hex" |
|||
"errors" |
|||
"io" |
|||
"sort" |
|||
"time" |
|||
"unicode" |
|||
|
|||
"github.com/andreimarcu/linx-server/backends" |
|||
"github.com/andreimarcu/linx-server/expiry" |
|||
"github.com/dchest/uniuri" |
|||
"github.com/minio/sha256-simd" |
|||
"gopkg.in/h2non/filetype.v1" |
|||
) |
|||
|
|||
var NotFoundErr = errors.New("File not found.") |
|||
|
|||
func generateMetadata(fName string, exp time.Time, delKey string) (m backends.Metadata, err error) { |
|||
file, err := fileBackend.Open(fName) |
|||
if err != nil { |
|||
return |
|||
} |
|||
defer file.Close() |
|||
|
|||
m.Size, err = fileBackend.Size(fName) |
|||
if err != nil { |
|||
return |
|||
} |
|||
|
|||
m.Expiry = exp |
|||
|
|||
if delKey == "" { |
|||
m.DeleteKey = uniuri.NewLen(30) |
|||
} else { |
|||
m.DeleteKey = delKey |
|||
} |
|||
|
|||
// Get first 512 bytes for mimetype detection
|
|||
header := make([]byte, 512) |
|||
file.Read(header) |
|||
|
|||
kind, err := filetype.Match(header) |
|||
if err != nil { |
|||
m.Mimetype = "application/octet-stream" |
|||
} else { |
|||
m.Mimetype = kind.MIME.Value |
|||
} |
|||
|
|||
if m.Mimetype == "" { |
|||
// Check if the file seems anything like text
|
|||
if printable(header) { |
|||
m.Mimetype = "text/plain" |
|||
} else { |
|||
m.Mimetype = "application/octet-stream" |
|||
} |
|||
} |
|||
|
|||
// Compute the sha256sum
|
|||
hasher := sha256.New() |
|||
file.Seek(0, 0) |
|||
_, err = io.Copy(hasher, file) |
|||
if err == nil { |
|||
m.Sha256sum = hex.EncodeToString(hasher.Sum(nil)) |
|||
} |
|||
file.Seek(0, 0) |
|||
|
|||
// If archive, grab list of filenames
|
|||
if m.Mimetype == "application/x-tar" { |
|||
tReadr := tar.NewReader(file) |
|||
for { |
|||
hdr, err := tReadr.Next() |
|||
if err == io.EOF || err != nil { |
|||
break |
|||
} |
|||
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg { |
|||
m.ArchiveFiles = append(m.ArchiveFiles, hdr.Name) |
|||
} |
|||
} |
|||
sort.Strings(m.ArchiveFiles) |
|||
} else if m.Mimetype == "application/x-gzip" { |
|||
gzf, err := gzip.NewReader(file) |
|||
if err == nil { |
|||
tReadr := tar.NewReader(gzf) |
|||
for { |
|||
hdr, err := tReadr.Next() |
|||
if err == io.EOF || err != nil { |
|||
break |
|||
} |
|||
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg { |
|||
m.ArchiveFiles = append(m.ArchiveFiles, hdr.Name) |
|||
} |
|||
} |
|||
sort.Strings(m.ArchiveFiles) |
|||
} |
|||
} else if m.Mimetype == "application/x-bzip" { |
|||
bzf := bzip2.NewReader(file) |
|||
tReadr := tar.NewReader(bzf) |
|||
for { |
|||
hdr, err := tReadr.Next() |
|||
if err == io.EOF || err != nil { |
|||
break |
|||
} |
|||
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg { |
|||
m.ArchiveFiles = append(m.ArchiveFiles, hdr.Name) |
|||
} |
|||
} |
|||
sort.Strings(m.ArchiveFiles) |
|||
} else if m.Mimetype == "application/zip" { |
|||
zf, err := zip.NewReader(file, m.Size) |
|||
if err == nil { |
|||
for _, f := range zf.File { |
|||
m.ArchiveFiles = append(m.ArchiveFiles, f.Name) |
|||
} |
|||
} |
|||
sort.Strings(m.ArchiveFiles) |
|||
} |
|||
|
|||
return |
|||
} |
|||
|
|||
func metadataWrite(filename string, metadata *backends.Metadata) error { |
|||
return metaBackend.Put(filename, metadata) |
|||
} |
|||
|
|||
func metadataRead(filename string) (metadata backends.Metadata, err error) { |
|||
metadata, err = metaBackend.Get(filename) |
|||
if err != nil { |
|||
// Metadata does not exist, generate one
|
|||
newMData, err := generateMetadata(filename, expiry.NeverExpire, "") |
|||
if err != nil { |
|||
return metadata, err |
|||
} |
|||
metadataWrite(filename, &newMData) |
|||
|
|||
metadata, err = metaBackend.Get(filename) |
|||
} |
|||
|
|||
return |
|||
} |
|||
|
|||
func printable(data []byte) bool { |
|||
for i, b := range data { |
|||
r := rune(b) |
|||
|
|||
// A null terminator that's not at the beginning of the file
|
|||
if r == 0 && i == 0 { |
|||
return false |
|||
} else if r == 0 && i < 0 { |
|||
continue |
|||
} |
|||
|
|||
if r > unicode.MaxASCII { |
|||
return false |
|||
} |
|||
|
|||
} |
|||
|
|||
return true |
|||
} |
@ -0,0 +1,28 @@ |
|||
package torrent |
|||
|
|||
import ( |
|||
"crypto/sha1" |
|||
) |
|||
|
|||
const ( |
|||
TORRENT_PIECE_LENGTH = 262144 |
|||
) |
|||
|
|||
type TorrentInfo struct { |
|||
PieceLength int `bencode:"piece length"` |
|||
Pieces string `bencode:"pieces"` |
|||
Name string `bencode:"name"` |
|||
Length int `bencode:"length"` |
|||
} |
|||
|
|||
type Torrent struct { |
|||
Encoding string `bencode:"encoding"` |
|||
Info TorrentInfo `bencode:"info"` |
|||
UrlList []string `bencode:"url-list"` |
|||
} |
|||
|
|||
func HashPiece(piece []byte) []byte { |
|||
h := sha1.New() |
|||
h.Write(piece) |
|||
return h.Sum(nil) |
|||
} |
Write
Preview
Loading…
Cancel
Save
Reference in new issue