51 changed files with 1826 additions and 1007 deletions
-
2.gitignore
-
4.travis.yml
-
23Dockerfile
-
42README.md
-
23backends/backends.go
-
156backends/localfs/localfs.go
-
17backends/meta.go
-
195backends/s3/s3.go
-
24backends/storage.go
-
117build.sh
-
11csp.go
-
12csp_test.go
-
19delete.go
-
72display.go
-
25expiry.go
-
13expiry/expiry.go
-
62fileserve.go
-
70helpers/archive.go
-
67helpers/helpers.go
-
27httputil/LICENSE
-
218httputil/conditional.go
-
46linx-cleanup/cleanup.go
-
222meta.go
-
46pages.go
-
67server.go
-
206server_test.go
-
89shorturl.go
-
10static/css/dropzone.css
-
3static/css/github-markdown.css
-
205static/css/linx.css
-
6static/js/bin.js
-
39static/js/shorturl.js
-
171static/js/upload.js
-
1templates.go
-
4templates/404.html
-
16templates/API.html
-
3templates/base.html
-
4templates/display/audio.html
-
22templates/display/base.html
-
37templates/display/bin.html
-
4templates/display/file.html
-
4templates/display/image.html
-
4templates/display/pdf.html
-
34templates/display/story.html
-
4templates/display/video.html
-
4templates/index.html
-
20templates/paste.html
-
67torrent.go
-
28torrent/torrent.go
-
7torrent_test.go
-
143upload.go
@ -1,8 +1,8 @@ |
|||||
language: go |
language: go |
||||
|
|
||||
go: |
go: |
||||
- 1.5 |
|
||||
- 1.6 |
|
||||
|
- "1.10" |
||||
|
- "1.11" |
||||
|
|
||||
before_script: |
before_script: |
||||
- go vet ./... |
- go vet ./... |
||||
|
@ -1,13 +1,28 @@ |
|||||
FROM golang:alpine |
|
||||
|
FROM golang:alpine3.8 AS build |
||||
|
|
||||
|
COPY . /go/src/github.com/andreimarcu/linx-server |
||||
|
WORKDIR /go/src/github.com/andreimarcu/linx-server |
||||
|
|
||||
RUN set -ex \ |
RUN set -ex \ |
||||
&& apk add --no-cache --virtual .build-deps git mercurial \ |
|
||||
&& go get github.com/andreimarcu/linx-server \ |
|
||||
|
&& apk add --no-cache --virtual .build-deps git \ |
||||
|
&& go get -v . \ |
||||
&& apk del .build-deps |
&& apk del .build-deps |
||||
|
|
||||
|
FROM alpine:3.8 |
||||
|
|
||||
|
COPY --from=build /go/bin/linx-server /usr/local/bin/linx-server |
||||
|
|
||||
|
ENV GOPATH /go |
||||
|
ENV SSL_CERT_FILE /etc/ssl/cert.pem |
||||
|
|
||||
|
COPY static /go/src/github.com/andreimarcu/linx-server/static/ |
||||
|
COPY templates /go/src/github.com/andreimarcu/linx-server/templates/ |
||||
|
|
||||
|
RUN mkdir -p /data/files && mkdir -p /data/meta && chown -R 65534:65534 /data |
||||
|
|
||||
VOLUME ["/data/files", "/data/meta"] |
VOLUME ["/data/files", "/data/meta"] |
||||
|
|
||||
EXPOSE 8080 |
EXPOSE 8080 |
||||
USER nobody |
USER nobody |
||||
ENTRYPOINT ["/go/bin/linx-server", "-bind=0.0.0.0:8080", "-filespath=/data/files/", "-metapath=/data/meta/"] |
|
||||
|
ENTRYPOINT ["/usr/local/bin/linx-server", "-bind=0.0.0.0:8080", "-filespath=/data/files/", "-metapath=/data/meta/"] |
||||
CMD ["-sitename=linx", "-allowhotlink"] |
CMD ["-sitename=linx", "-allowhotlink"] |
@ -1,23 +0,0 @@ |
|||||
package backends |
|
||||
|
|
||||
import ( |
|
||||
"io" |
|
||||
"net/http" |
|
||||
) |
|
||||
|
|
||||
type ReadSeekCloser interface { |
|
||||
io.Reader |
|
||||
io.Closer |
|
||||
io.Seeker |
|
||||
io.ReaderAt |
|
||||
} |
|
||||
|
|
||||
type StorageBackend interface { |
|
||||
Delete(key string) error |
|
||||
Exists(key string) (bool, error) |
|
||||
Get(key string) ([]byte, error) |
|
||||
Put(key string, r io.Reader) (int64, error) |
|
||||
Open(key string) (ReadSeekCloser, error) |
|
||||
ServeFile(key string, w http.ResponseWriter, r *http.Request) |
|
||||
Size(key string) (int64, error) |
|
||||
} |
|
@ -0,0 +1,17 @@ |
|||||
|
package backends |
||||
|
|
||||
|
import ( |
||||
|
"errors" |
||||
|
"time" |
||||
|
) |
||||
|
|
||||
|
type Metadata struct { |
||||
|
DeleteKey string |
||||
|
Sha256sum string |
||||
|
Mimetype string |
||||
|
Size int64 |
||||
|
Expiry time.Time |
||||
|
ArchiveFiles []string |
||||
|
} |
||||
|
|
||||
|
var BadMetadata = errors.New("Corrupted metadata.") |
@ -0,0 +1,195 @@ |
|||||
|
package s3 |
||||
|
|
||||
|
import ( |
||||
|
"io" |
||||
|
"io/ioutil" |
||||
|
"os" |
||||
|
"strconv" |
||||
|
"time" |
||||
|
|
||||
|
"github.com/andreimarcu/linx-server/backends" |
||||
|
"github.com/andreimarcu/linx-server/helpers" |
||||
|
"github.com/aws/aws-sdk-go/aws" |
||||
|
"github.com/aws/aws-sdk-go/aws/awserr" |
||||
|
"github.com/aws/aws-sdk-go/aws/session" |
||||
|
"github.com/aws/aws-sdk-go/service/s3" |
||||
|
"github.com/aws/aws-sdk-go/service/s3/s3manager" |
||||
|
) |
||||
|
|
||||
|
type S3Backend struct { |
||||
|
bucket string |
||||
|
svc *s3.S3 |
||||
|
} |
||||
|
|
||||
|
func (b S3Backend) Delete(key string) error { |
||||
|
_, err := b.svc.DeleteObject(&s3.DeleteObjectInput{ |
||||
|
Bucket: aws.String(b.bucket), |
||||
|
Key: aws.String(key), |
||||
|
}) |
||||
|
if err != nil { |
||||
|
return err |
||||
|
} |
||||
|
return nil |
||||
|
} |
||||
|
|
||||
|
func (b S3Backend) Exists(key string) (bool, error) { |
||||
|
_, err := b.svc.HeadObject(&s3.HeadObjectInput{ |
||||
|
Bucket: aws.String(b.bucket), |
||||
|
Key: aws.String(key), |
||||
|
}) |
||||
|
return err == nil, err |
||||
|
} |
||||
|
|
||||
|
func (b S3Backend) Head(key string) (metadata backends.Metadata, err error) { |
||||
|
var result *s3.HeadObjectOutput |
||||
|
result, err = b.svc.HeadObject(&s3.HeadObjectInput{ |
||||
|
Bucket: aws.String(b.bucket), |
||||
|
Key: aws.String(key), |
||||
|
}) |
||||
|
if err != nil { |
||||
|
if aerr, ok := err.(awserr.Error); ok { |
||||
|
if aerr.Code() == s3.ErrCodeNoSuchKey || aerr.Code() == "NotFound" { |
||||
|
err = backends.NotFoundErr |
||||
|
} |
||||
|
} |
||||
|
return |
||||
|
} |
||||
|
|
||||
|
metadata, err = unmapMetadata(result.Metadata) |
||||
|
return |
||||
|
} |
||||
|
|
||||
|
func (b S3Backend) Get(key string) (metadata backends.Metadata, r io.ReadCloser, err error) { |
||||
|
var result *s3.GetObjectOutput |
||||
|
result, err = b.svc.GetObject(&s3.GetObjectInput{ |
||||
|
Bucket: aws.String(b.bucket), |
||||
|
Key: aws.String(key), |
||||
|
}) |
||||
|
if err != nil { |
||||
|
if aerr, ok := err.(awserr.Error); ok { |
||||
|
if aerr.Code() == s3.ErrCodeNoSuchKey || aerr.Code() == "NotFound" { |
||||
|
err = backends.NotFoundErr |
||||
|
} |
||||
|
} |
||||
|
return |
||||
|
} |
||||
|
|
||||
|
metadata, err = unmapMetadata(result.Metadata) |
||||
|
r = result.Body |
||||
|
return |
||||
|
} |
||||
|
|
||||
|
func mapMetadata(m backends.Metadata) map[string]*string { |
||||
|
return map[string]*string{ |
||||
|
"Expiry": aws.String(strconv.FormatInt(m.Expiry.Unix(), 10)), |
||||
|
"Delete_key": aws.String(m.DeleteKey), |
||||
|
"Size": aws.String(strconv.FormatInt(m.Size, 10)), |
||||
|
"Mimetype": aws.String(m.Mimetype), |
||||
|
"Sha256sum": aws.String(m.Sha256sum), |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
func unmapMetadata(input map[string]*string) (m backends.Metadata, err error) { |
||||
|
expiry, err := strconv.ParseInt(aws.StringValue(input["Expiry"]), 10, 64) |
||||
|
if err != nil { |
||||
|
return m, err |
||||
|
} |
||||
|
m.Expiry = time.Unix(expiry, 0) |
||||
|
|
||||
|
m.Size, err = strconv.ParseInt(aws.StringValue(input["Size"]), 10, 64) |
||||
|
if err != nil { |
||||
|
return |
||||
|
} |
||||
|
|
||||
|
m.DeleteKey = aws.StringValue(input["Delete_key"]) |
||||
|
m.Mimetype = aws.StringValue(input["Mimetype"]) |
||||
|
m.Sha256sum = aws.StringValue(input["Sha256sum"]) |
||||
|
return |
||||
|
} |
||||
|
|
||||
|
func (b S3Backend) Put(key string, r io.Reader, expiry time.Time, deleteKey string) (m backends.Metadata, err error) { |
||||
|
tmpDst, err := ioutil.TempFile("", "linx-server-upload") |
||||
|
if err != nil { |
||||
|
return m, err |
||||
|
} |
||||
|
defer tmpDst.Close() |
||||
|
defer os.Remove(tmpDst.Name()) |
||||
|
|
||||
|
bytes, err := io.Copy(tmpDst, r) |
||||
|
if bytes == 0 { |
||||
|
return m, backends.FileEmptyError |
||||
|
} else if err != nil { |
||||
|
return m, err |
||||
|
} |
||||
|
|
||||
|
m.Expiry = expiry |
||||
|
m.DeleteKey = deleteKey |
||||
|
m.Size = bytes |
||||
|
m.Mimetype, _ = helpers.DetectMime(tmpDst) |
||||
|
m.Sha256sum, _ = helpers.Sha256sum(tmpDst) |
||||
|
// XXX: we may not be able to write this to AWS easily
|
||||
|
//m.ArchiveFiles, _ = helpers.ListArchiveFiles(m.Mimetype, m.Size, tmpDst)
|
||||
|
|
||||
|
uploader := s3manager.NewUploaderWithClient(b.svc) |
||||
|
input := &s3manager.UploadInput{ |
||||
|
Bucket: aws.String(b.bucket), |
||||
|
Key: aws.String(key), |
||||
|
Body: tmpDst, |
||||
|
Metadata: mapMetadata(m), |
||||
|
} |
||||
|
_, err = uploader.Upload(input) |
||||
|
if err != nil { |
||||
|
return |
||||
|
} |
||||
|
|
||||
|
return |
||||
|
} |
||||
|
|
||||
|
func (b S3Backend) Size(key string) (int64, error) { |
||||
|
input := &s3.HeadObjectInput{ |
||||
|
Bucket: aws.String(b.bucket), |
||||
|
Key: aws.String(key), |
||||
|
} |
||||
|
result, err := b.svc.HeadObject(input) |
||||
|
if err != nil { |
||||
|
return 0, err |
||||
|
} |
||||
|
|
||||
|
return *result.ContentLength, nil |
||||
|
} |
||||
|
|
||||
|
func (b S3Backend) List() ([]string, error) { |
||||
|
var output []string |
||||
|
input := &s3.ListObjectsInput{ |
||||
|
Bucket: aws.String(b.bucket), |
||||
|
} |
||||
|
|
||||
|
results, err := b.svc.ListObjects(input) |
||||
|
if err != nil { |
||||
|
return nil, err |
||||
|
} |
||||
|
|
||||
|
|
||||
|
for _, object := range results.Contents { |
||||
|
output = append(output, *object.Key) |
||||
|
} |
||||
|
|
||||
|
return output, nil |
||||
|
} |
||||
|
|
||||
|
func NewS3Backend(bucket string, region string, endpoint string, forcePathStyle bool) S3Backend { |
||||
|
awsConfig := &aws.Config{} |
||||
|
if region != "" { |
||||
|
awsConfig.Region = aws.String(region) |
||||
|
} |
||||
|
if endpoint != "" { |
||||
|
awsConfig.Endpoint = aws.String(endpoint) |
||||
|
} |
||||
|
if forcePathStyle == true { |
||||
|
awsConfig.S3ForcePathStyle = aws.Bool(true) |
||||
|
} |
||||
|
|
||||
|
sess := session.Must(session.NewSession(awsConfig)) |
||||
|
svc := s3.New(sess) |
||||
|
return S3Backend{bucket: bucket, svc: svc} |
||||
|
} |
@ -0,0 +1,24 @@ |
|||||
|
package backends |
||||
|
|
||||
|
import ( |
||||
|
"errors" |
||||
|
"io" |
||||
|
"time" |
||||
|
) |
||||
|
|
||||
|
type StorageBackend interface { |
||||
|
Delete(key string) error |
||||
|
Exists(key string) (bool, error) |
||||
|
Head(key string) (Metadata, error) |
||||
|
Get(key string) (Metadata, io.ReadCloser, error) |
||||
|
Put(key string, r io.Reader, expiry time.Time, deleteKey string) (Metadata, error) |
||||
|
Size(key string) (int64, error) |
||||
|
} |
||||
|
|
||||
|
type MetaStorageBackend interface { |
||||
|
StorageBackend |
||||
|
List() ([]string, error) |
||||
|
} |
||||
|
|
||||
|
var NotFoundErr = errors.New("File not found.") |
||||
|
var FileEmptyError = errors.New("Empty file") |
@ -1,66 +1,67 @@ |
|||||
#!/bin/bash |
#!/bin/bash |
||||
|
|
||||
version="$1" |
|
||||
mkdir -p "binairies/""$version" |
|
||||
name="binairies/""$version""/linx-server-v""$version""_" |
|
||||
|
|
||||
GOOS=darwin GOARCH=amd64 go build -o "$name"osx-amd64 |
|
||||
rice append --exec "$name"osx-amd64 |
|
||||
|
|
||||
GOOS=darwin GOARCH=386 go build -o "$name"osx-386 |
|
||||
rice append --exec "$name"osx-386 |
|
||||
|
|
||||
GOOS=freebsd GOARCH=amd64 go build -o "$name"freebsd-amd64 |
|
||||
rice append --exec "$name"freebsd-amd64 |
|
||||
|
|
||||
GOOS=freebsd GOARCH=386 go build -o "$name"freebsd-386 |
|
||||
rice append --exec "$name"freebsd-386 |
|
||||
|
|
||||
GOOS=openbsd GOARCH=amd64 go build -o "$name"openbsd-amd64 |
|
||||
rice append --exec "$name"openbsd-amd64 |
|
||||
|
function build_binary_rice { |
||||
|
name="$1" |
||||
|
|
||||
|
for arch in amd64 386; do |
||||
|
GOOS=darwin GOARCH=$arch go build -o "$name"osx-$arch |
||||
|
rice append --exec "$name"osx-$arch |
||||
|
done |
||||
|
|
||||
|
for arch in amd64 386; do |
||||
|
GOOS=freebsd GOARCH=$arch go build -o "$name"freebsd-$arch |
||||
|
rice append --exec "$name"freebsd-$arch |
||||
|
done |
||||
|
|
||||
|
for arch in amd64 386; do |
||||
|
GOOS=openbsd GOARCH=$arch go build -o "$name"openbsd-$arch |
||||
|
rice append --exec "$name"openbsd-$arch |
||||
|
done |
||||
|
|
||||
|
for arch in arm arm64 amd64 386; do |
||||
|
GOOS=linux GOARCH=$arch go build -o "$name"linux-$arch |
||||
|
rice append --exec "$name"linux-$arch |
||||
|
done |
||||
|
|
||||
|
for arch in amd64 386; do |
||||
|
GOOS=windows GOARCH=$arch go build -o "$name"windows-$arch.exe |
||||
|
rice append --exec "$name"windows-$arch.exe |
||||
|
done |
||||
|
} |
||||
|
|
||||
|
function build_binary { |
||||
|
name="$1" |
||||
|
|
||||
|
for arch in amd64 386; do |
||||
|
GOOS=darwin GOARCH=$arch go build -o "$name"osx-$arch |
||||
|
done |
||||
|
|
||||
|
for arch in amd64 386; do |
||||
|
GOOS=freebsd GOARCH=$arch go build -o "$name"freebsd-$arch |
||||
|
done |
||||
|
|
||||
|
for arch in amd64 386; do |
||||
|
GOOS=openbsd GOARCH=$arch go build -o "$name"openbsd-$arch |
||||
|
done |
||||
|
|
||||
|
for arch in arm arm64 amd64 386; do |
||||
|
GOOS=linux GOARCH=$arch go build -o "$name"linux-$arch |
||||
|
done |
||||
|
|
||||
|
for arch in amd64 386; do |
||||
|
GOOS=windows GOARCH=$arch go build -o "$name"windows-$arch.exe |
||||
|
done |
||||
|
} |
||||
|
|
||||
GOOS=openbsd GOARCH=386 go build -o "$name"openbsd-386 |
|
||||
rice append --exec "$name"openbsd-386 |
|
||||
|
|
||||
GOOS=linux GOARCH=arm go build -o "$name"linux-arm |
|
||||
rice append --exec "$name"linux-arm |
|
||||
|
|
||||
GOOS=linux GOARCH=amd64 go build -o "$name"linux-amd64 |
|
||||
rice append --exec "$name"linux-amd64 |
|
||||
|
|
||||
GOOS=linux GOARCH=386 go build -o "$name"linux-386 |
|
||||
rice append --exec "$name"linux-386 |
|
||||
|
|
||||
GOOS=windows GOARCH=amd64 go build -o "$name"windows-amd64.exe |
|
||||
rice append --exec "$name"windows-amd64.exe |
|
||||
|
|
||||
GOOS=windows GOARCH=386 go build -o "$name"windows-386.exe |
|
||||
rice append --exec "$name"windows-386.exe |
|
||||
|
version="$1" |
||||
|
mkdir -p "binaries/""$version" |
||||
|
|
||||
|
build_binary_rice "binaries/""$version""/linx-server-v""$version""_" |
||||
|
|
||||
cd linx-genkey |
cd linx-genkey |
||||
name="../binairies/""$version""/linx-genkey-v""$version""_" |
|
||||
|
|
||||
GOOS=darwin GOARCH=amd64 go build -o "$name"osx-amd64 |
|
||||
|
|
||||
GOOS=darwin GOARCH=386 go build -o "$name"osx-386 |
|
||||
|
|
||||
GOOS=freebsd GOARCH=amd64 go build -o "$name"freebsd-amd64 |
|
||||
|
|
||||
GOOS=freebsd GOARCH=386 go build -o "$name"freebsd-386 |
|
||||
|
|
||||
GOOS=openbsd GOARCH=amd64 go build -o "$name"openbsd-amd64 |
|
||||
|
|
||||
GOOS=openbsd GOARCH=386 go build -o "$name"openbsd-386 |
|
||||
|
|
||||
GOOS=linux GOARCH=arm go build -o "$name"linux-arm |
|
||||
|
|
||||
GOOS=linux GOARCH=amd64 go build -o "$name"linux-amd64 |
|
||||
|
|
||||
GOOS=linux GOARCH=386 go build -o "$name"linux-386 |
|
||||
|
|
||||
GOOS=windows GOARCH=amd64 go build -o "$name"windows-amd64.exe |
|
||||
|
|
||||
GOOS=windows GOARCH=386 go build -o "$name"windows-386.exe |
|
||||
|
build_binary "../binaries/""$version""/linx-genkey-v""$version""_" |
||||
|
cd .. |
||||
|
|
||||
|
cd linx-cleanup |
||||
|
build_binary "../binaries/""$version""/linx-cleanup-v""$version""_" |
||||
cd .. |
cd .. |
@ -0,0 +1,13 @@ |
|||||
|
package expiry |
||||
|
|
||||
|
import ( |
||||
|
"time" |
||||
|
) |
||||
|
|
||||
|
var NeverExpire = time.Unix(0, 0) |
||||
|
|
||||
|
// Determine if a file with expiry set to "ts" has expired yet
|
||||
|
func IsTsExpired(ts time.Time) bool { |
||||
|
now := time.Now() |
||||
|
return ts != NeverExpire && now.After(ts) |
||||
|
} |
@ -0,0 +1,70 @@ |
|||||
|
package helpers |
||||
|
|
||||
|
import ( |
||||
|
"archive/tar" |
||||
|
"archive/zip" |
||||
|
"compress/bzip2" |
||||
|
"compress/gzip" |
||||
|
"io" |
||||
|
"sort" |
||||
|
) |
||||
|
|
||||
|
type ReadSeekerAt interface { |
||||
|
io.Reader |
||||
|
io.Seeker |
||||
|
io.ReaderAt |
||||
|
} |
||||
|
|
||||
|
func ListArchiveFiles(mimetype string, size int64, r ReadSeekerAt) (files []string, err error) { |
||||
|
if mimetype == "application/x-tar" { |
||||
|
tReadr := tar.NewReader(r) |
||||
|
for { |
||||
|
hdr, err := tReadr.Next() |
||||
|
if err == io.EOF || err != nil { |
||||
|
break |
||||
|
} |
||||
|
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg { |
||||
|
files = append(files, hdr.Name) |
||||
|
} |
||||
|
} |
||||
|
sort.Strings(files) |
||||
|
} else if mimetype == "application/x-gzip" { |
||||
|
gzf, err := gzip.NewReader(r) |
||||
|
if err == nil { |
||||
|
tReadr := tar.NewReader(gzf) |
||||
|
for { |
||||
|
hdr, err := tReadr.Next() |
||||
|
if err == io.EOF || err != nil { |
||||
|
break |
||||
|
} |
||||
|
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg { |
||||
|
files = append(files, hdr.Name) |
||||
|
} |
||||
|
} |
||||
|
sort.Strings(files) |
||||
|
} |
||||
|
} else if mimetype == "application/x-bzip" { |
||||
|
bzf := bzip2.NewReader(r) |
||||
|
tReadr := tar.NewReader(bzf) |
||||
|
for { |
||||
|
hdr, err := tReadr.Next() |
||||
|
if err == io.EOF || err != nil { |
||||
|
break |
||||
|
} |
||||
|
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg { |
||||
|
files = append(files, hdr.Name) |
||||
|
} |
||||
|
} |
||||
|
sort.Strings(files) |
||||
|
} else if mimetype == "application/zip" { |
||||
|
zf, err := zip.NewReader(r, size) |
||||
|
if err == nil { |
||||
|
for _, f := range zf.File { |
||||
|
files = append(files, f.Name) |
||||
|
} |
||||
|
} |
||||
|
sort.Strings(files) |
||||
|
} |
||||
|
|
||||
|
return |
||||
|
} |
@ -0,0 +1,67 @@ |
|||||
|
package helpers |
||||
|
|
||||
|
import ( |
||||
|
"encoding/hex" |
||||
|
"io" |
||||
|
"unicode" |
||||
|
|
||||
|
"github.com/minio/sha256-simd" |
||||
|
"gopkg.in/h2non/filetype.v1" |
||||
|
) |
||||
|
|
||||
|
func DetectMime(r io.ReadSeeker) (string, error) { |
||||
|
// Get first 512 bytes for mimetype detection
|
||||
|
header := make([]byte, 512) |
||||
|
|
||||
|
r.Seek(0, 0) |
||||
|
r.Read(header) |
||||
|
r.Seek(0, 0) |
||||
|
|
||||
|
kind, err := filetype.Match(header) |
||||
|
if err != nil { |
||||
|
return "application/octet-stream", err |
||||
|
} else if kind.MIME.Value != "" { |
||||
|
return kind.MIME.Value, nil |
||||
|
} |
||||
|
|
||||
|
// Check if the file seems anything like text
|
||||
|
if printable(header) { |
||||
|
return "text/plain", nil |
||||
|
} else { |
||||
|
return "application/octet-stream", nil |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
func Sha256sum(r io.ReadSeeker) (string, error) { |
||||
|
hasher := sha256.New() |
||||
|
|
||||
|
r.Seek(0, 0) |
||||
|
_, err := io.Copy(hasher, r) |
||||
|
if err != nil { |
||||
|
return "", err |
||||
|
} |
||||
|
|
||||
|
r.Seek(0, 0) |
||||
|
|
||||
|
return hex.EncodeToString(hasher.Sum(nil)), nil |
||||
|
} |
||||
|
|
||||
|
func printable(data []byte) bool { |
||||
|
for i, b := range data { |
||||
|
r := rune(b) |
||||
|
|
||||
|
// A null terminator that's not at the beginning of the file
|
||||
|
if r == 0 && i == 0 { |
||||
|
return false |
||||
|
} else if r == 0 && i < 0 { |
||||
|
continue |
||||
|
} |
||||
|
|
||||
|
if r > unicode.MaxASCII { |
||||
|
return false |
||||
|
} |
||||
|
|
||||
|
} |
||||
|
|
||||
|
return true |
||||
|
} |
@ -0,0 +1,27 @@ |
|||||
|
Copyright (c) 2009 The Go Authors. All rights reserved. |
||||
|
|
||||
|
Redistribution and use in source and binary forms, with or without |
||||
|
modification, are permitted provided that the following conditions are |
||||
|
met: |
||||
|
|
||||
|
* Redistributions of source code must retain the above copyright |
||||
|
notice, this list of conditions and the following disclaimer. |
||||
|
* Redistributions in binary form must reproduce the above |
||||
|
copyright notice, this list of conditions and the following disclaimer |
||||
|
in the documentation and/or other materials provided with the |
||||
|
distribution. |
||||
|
* Neither the name of Google Inc. nor the names of its |
||||
|
contributors may be used to endorse or promote products derived from |
||||
|
this software without specific prior written permission. |
||||
|
|
||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
||||
|
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
||||
|
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
||||
|
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
||||
|
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
||||
|
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
||||
|
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
||||
|
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
||||
|
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
||||
|
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
@ -0,0 +1,218 @@ |
|||||
|
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
|
// Use of this source code is governed by a BSD-style
|
||||
|
// license that can be found in the LICENSE file.
|
||||
|
|
||||
|
// HTTP file system request handler
|
||||
|
|
||||
|
package httputil |
||||
|
|
||||
|
import ( |
||||
|
"net/http" |
||||
|
"net/textproto" |
||||
|
"strings" |
||||
|
"time" |
||||
|
) |
||||
|
|
||||
|
// scanETag determines if a syntactically valid ETag is present at s. If so,
|
||||
|
// the ETag and remaining text after consuming ETag is returned. Otherwise,
|
||||
|
// it returns "", "".
|
||||
|
func scanETag(s string) (etag string, remain string) { |
||||
|
s = textproto.TrimString(s) |
||||
|
start := 0 |
||||
|
if strings.HasPrefix(s, "W/") { |
||||
|
start = 2 |
||||
|
} |
||||
|
if len(s[start:]) < 2 || s[start] != '"' { |
||||
|
return "", "" |
||||
|
} |
||||
|
// ETag is either W/"text" or "text".
|
||||
|
// See RFC 7232 2.3.
|
||||
|
for i := start + 1; i < len(s); i++ { |
||||
|
c := s[i] |
||||
|
switch { |
||||
|
// Character values allowed in ETags.
|
||||
|
case c == 0x21 || c >= 0x23 && c <= 0x7E || c >= 0x80: |
||||
|
case c == '"': |
||||
|
return s[:i+1], s[i+1:] |
||||
|
default: |
||||
|
return "", "" |
||||
|
} |
||||
|
} |
||||
|
return "", "" |
||||
|
} |
||||
|
|
||||
|
// etagStrongMatch reports whether a and b match using strong ETag comparison.
|
||||
|
// Assumes a and b are valid ETags.
|
||||
|
func etagStrongMatch(a, b string) bool { |
||||
|
return a == b && a != "" && a[0] == '"' |
||||
|
} |
||||
|
|
||||
|
// etagWeakMatch reports whether a and b match using weak ETag comparison.
|
||||
|
// Assumes a and b are valid ETags.
|
||||
|
func etagWeakMatch(a, b string) bool { |
||||
|
return strings.TrimPrefix(a, "W/") == strings.TrimPrefix(b, "W/") |
||||
|
} |
||||
|
|
||||
|
// condResult is the result of an HTTP request precondition check.
|
||||
|
// See https://tools.ietf.org/html/rfc7232 section 3.
|
||||
|
type condResult int |
||||
|
|
||||
|
const ( |
||||
|
condNone condResult = iota |
||||
|
condTrue |
||||
|
condFalse |
||||
|
) |
||||
|
|
||||
|
func checkIfMatch(w http.ResponseWriter, r *http.Request) condResult { |
||||
|
im := r.Header.Get("If-Match") |
||||
|
if im == "" { |
||||
|
return condNone |
||||
|
} |
||||
|
for { |
||||
|
im = textproto.TrimString(im) |
||||
|
if len(im) == 0 { |
||||
|
break |
||||
|
} |
||||
|
if im[0] == ',' { |
||||
|
im = im[1:] |
||||
|
continue |
||||
|
} |
||||
|
if im[0] == '*' { |
||||
|
return condTrue |
||||
|
} |
||||
|
etag, remain := scanETag(im) |
||||
|
if etag == "" { |
||||
|
break |
||||
|
} |
||||
|
if etagStrongMatch(etag, w.Header().Get("Etag")) { |
||||
|
return condTrue |
||||
|
} |
||||
|
im = remain |
||||
|
} |
||||
|
|
||||
|
return condFalse |
||||
|
} |
||||
|
|
||||
|
func checkIfUnmodifiedSince(r *http.Request, modtime time.Time) condResult { |
||||
|
ius := r.Header.Get("If-Unmodified-Since") |
||||
|
if ius == "" || isZeroTime(modtime) { |
||||
|
return condNone |
||||
|
} |
||||
|
if t, err := http.ParseTime(ius); err == nil { |
||||
|
// The Date-Modified header truncates sub-second precision, so
|
||||
|
// use mtime < t+1s instead of mtime <= t to check for unmodified.
|
||||
|
if modtime.Before(t.Add(1 * time.Second)) { |
||||
|
return condTrue |
||||
|
} |
||||
|
return condFalse |
||||
|
} |
||||
|
return condNone |
||||
|
} |
||||
|
|
||||
|
func checkIfNoneMatch(w http.ResponseWriter, r *http.Request) condResult { |
||||
|
inm := r.Header.Get("If-None-Match") |
||||
|
if inm == "" { |
||||
|
return condNone |
||||
|
} |
||||
|
buf := inm |
||||
|
for { |
||||
|
buf = textproto.TrimString(buf) |
||||
|
if len(buf) == 0 { |
||||
|
break |
||||
|
} |
||||
|
if buf[0] == ',' { |
||||
|
buf = buf[1:] |
||||
|
} |
||||
|
if buf[0] == '*' { |
||||
|
return condFalse |
||||
|
} |
||||
|
etag, remain := scanETag(buf) |
||||
|
if etag == "" { |
||||
|
break |
||||
|
} |
||||
|
if etagWeakMatch(etag, w.Header().Get("Etag")) { |
||||
|
return condFalse |
||||
|
} |
||||
|
buf = remain |
||||
|
} |
||||
|
return condTrue |
||||
|
} |
||||
|
|
||||
|
func checkIfModifiedSince(r *http.Request, modtime time.Time) condResult { |
||||
|
if r.Method != "GET" && r.Method != "HEAD" { |
||||
|
return condNone |
||||
|
} |
||||
|
ims := r.Header.Get("If-Modified-Since") |
||||
|
if ims == "" || isZeroTime(modtime) { |
||||
|
return condNone |
||||
|
} |
||||
|
t, err := http.ParseTime(ims) |
||||
|
if err != nil { |
||||
|
return condNone |
||||
|
} |
||||
|
// The Date-Modified header truncates sub-second precision, so
|
||||
|
// use mtime < t+1s instead of mtime <= t to check for unmodified.
|
||||
|
if modtime.Before(t.Add(1 * time.Second)) { |
||||
|
return condFalse |
||||
|
} |
||||
|
return condTrue |
||||
|
} |
||||
|
|
||||
|
var unixEpochTime = time.Unix(0, 0) |
||||
|
|
||||
|
// isZeroTime reports whether t is obviously unspecified (either zero or Unix()=0).
|
||||
|
func isZeroTime(t time.Time) bool { |
||||
|
return t.IsZero() || t.Equal(unixEpochTime) |
||||
|
} |
||||
|
|
||||
|
func setLastModified(w http.ResponseWriter, modtime time.Time) { |
||||
|
if !isZeroTime(modtime) { |
||||
|
w.Header().Set("Last-Modified", modtime.UTC().Format(http.TimeFormat)) |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
func writeNotModified(w http.ResponseWriter) { |
||||
|
// RFC 7232 section 4.1:
|
||||
|
// a sender SHOULD NOT generate representation metadata other than the
|
||||
|
// above listed fields unless said metadata exists for the purpose of
|
||||
|
// guiding cache updates (e.g., Last-Modified might be useful if the
|
||||
|
// response does not have an ETag field).
|
||||
|
h := w.Header() |
||||
|
delete(h, "Content-Type") |
||||
|
delete(h, "Content-Length") |
||||
|
if h.Get("Etag") != "" { |
||||
|
delete(h, "Last-Modified") |
||||
|
} |
||||
|
w.WriteHeader(http.StatusNotModified) |
||||
|
} |
||||
|
|
||||
|
// CheckPreconditions evaluates request preconditions and reports whether a precondition
|
||||
|
// resulted in sending StatusNotModified or StatusPreconditionFailed.
|
||||
|
func CheckPreconditions(w http.ResponseWriter, r *http.Request, modtime time.Time) (done bool) { |
||||
|
// This function carefully follows RFC 7232 section 6.
|
||||
|
ch := checkIfMatch(w, r) |
||||
|
if ch == condNone { |
||||
|
ch = checkIfUnmodifiedSince(r, modtime) |
||||
|
} |
||||
|
if ch == condFalse { |
||||
|
w.WriteHeader(http.StatusPreconditionFailed) |
||||
|
return true |
||||
|
} |
||||
|
switch checkIfNoneMatch(w, r) { |
||||
|
case condFalse: |
||||
|
if r.Method == "GET" || r.Method == "HEAD" { |
||||
|
writeNotModified(w) |
||||
|
return true |
||||
|
} else { |
||||
|
w.WriteHeader(http.StatusPreconditionFailed) |
||||
|
return true |
||||
|
} |
||||
|
case condNone: |
||||
|
if checkIfModifiedSince(r, modtime) == condFalse { |
||||
|
writeNotModified(w) |
||||
|
return true |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
return false |
||||
|
} |
@ -0,0 +1,46 @@ |
|||||
|
package main |
||||
|
|
||||
|
import ( |
||||
|
"flag" |
||||
|
"log" |
||||
|
|
||||
|
"github.com/andreimarcu/linx-server/backends/localfs" |
||||
|
"github.com/andreimarcu/linx-server/expiry" |
||||
|
) |
||||
|
|
||||
|
func main() { |
||||
|
var filesDir string |
||||
|
var metaDir string |
||||
|
var noLogs bool |
||||
|
|
||||
|
flag.StringVar(&filesDir, "filespath", "files/", |
||||
|
"path to files directory") |
||||
|
flag.StringVar(&metaDir, "metapath", "meta/", |
||||
|
"path to metadata directory") |
||||
|
flag.BoolVar(&noLogs, "nologs", false, |
||||
|
"don't log deleted files") |
||||
|
flag.Parse() |
||||
|
|
||||
|
fileBackend := localfs.NewLocalfsBackend(metaDir, filesDir) |
||||
|
|
||||
|
files, err := fileBackend.List() |
||||
|
if err != nil { |
||||
|
panic(err) |
||||
|
} |
||||
|
|
||||
|
for _, filename := range files { |
||||
|
metadata, err := fileBackend.Head(filename) |
||||
|
if err != nil { |
||||
|
if !noLogs { |
||||
|
log.Printf("Failed to find metadata for %s", filename) |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
if expiry.IsTsExpired(metadata.Expiry) { |
||||
|
if !noLogs { |
||||
|
log.Printf("Delete %s", filename) |
||||
|
} |
||||
|
fileBackend.Delete(filename) |
||||
|
} |
||||
|
} |
||||
|
} |
@ -1,222 +0,0 @@ |
|||||
package main |
|
||||
|
|
||||
import ( |
|
||||
"archive/tar" |
|
||||
"archive/zip" |
|
||||
"bytes" |
|
||||
"compress/bzip2" |
|
||||
"compress/gzip" |
|
||||
"crypto/sha256" |
|
||||
"encoding/hex" |
|
||||
"encoding/json" |
|
||||
"errors" |
|
||||
"io" |
|
||||
"sort" |
|
||||
"time" |
|
||||
"unicode" |
|
||||
|
|
||||
"github.com/dchest/uniuri" |
|
||||
"gopkg.in/h2non/filetype.v1" |
|
||||
) |
|
||||
|
|
||||
type MetadataJSON struct { |
|
||||
DeleteKey string `json:"delete_key"` |
|
||||
Sha256sum string `json:"sha256sum"` |
|
||||
Mimetype string `json:"mimetype"` |
|
||||
Size int64 `json:"size"` |
|
||||
Expiry int64 `json:"expiry"` |
|
||||
ArchiveFiles []string `json:"archive_files,omitempty"` |
|
||||
ShortURL string `json:"short_url"` |
|
||||
} |
|
||||
|
|
||||
type Metadata struct { |
|
||||
DeleteKey string |
|
||||
Sha256sum string |
|
||||
Mimetype string |
|
||||
Size int64 |
|
||||
Expiry time.Time |
|
||||
ArchiveFiles []string |
|
||||
ShortURL string |
|
||||
} |
|
||||
|
|
||||
var NotFoundErr = errors.New("File not found.") |
|
||||
var BadMetadata = errors.New("Corrupted metadata.") |
|
||||
|
|
||||
func generateMetadata(fName string, exp time.Time, delKey string) (m Metadata, err error) { |
|
||||
file, err := fileBackend.Open(fName) |
|
||||
if err != nil { |
|
||||
return |
|
||||
} |
|
||||
defer file.Close() |
|
||||
|
|
||||
m.Size, err = fileBackend.Size(fName) |
|
||||
if err != nil { |
|
||||
return |
|
||||
} |
|
||||
|
|
||||
m.Expiry = exp |
|
||||
|
|
||||
if delKey == "" { |
|
||||
m.DeleteKey = uniuri.NewLen(30) |
|
||||
} else { |
|
||||
m.DeleteKey = delKey |
|
||||
} |
|
||||
|
|
||||
// Get first 512 bytes for mimetype detection
|
|
||||
header := make([]byte, 512) |
|
||||
file.Read(header) |
|
||||
|
|
||||
kind, err := filetype.Match(header) |
|
||||
if err != nil { |
|
||||
m.Mimetype = "application/octet-stream" |
|
||||
} else { |
|
||||
m.Mimetype = kind.MIME.Value |
|
||||
} |
|
||||
|
|
||||
if m.Mimetype == "" { |
|
||||
// Check if the file seems anything like text
|
|
||||
if printable(header) { |
|
||||
m.Mimetype = "text/plain" |
|
||||
} else { |
|
||||
m.Mimetype = "application/octet-stream" |
|
||||
} |
|
||||
} |
|
||||
|
|
||||
// Compute the sha256sum
|
|
||||
hasher := sha256.New() |
|
||||
file.Seek(0, 0) |
|
||||
_, err = io.Copy(hasher, file) |
|
||||
if err == nil { |
|
||||
m.Sha256sum = hex.EncodeToString(hasher.Sum(nil)) |
|
||||
} |
|
||||
file.Seek(0, 0) |
|
||||
|
|
||||
// If archive, grab list of filenames
|
|
||||
if m.Mimetype == "application/x-tar" { |
|
||||
tReadr := tar.NewReader(file) |
|
||||
for { |
|
||||
hdr, err := tReadr.Next() |
|
||||
if err == io.EOF || err != nil { |
|
||||
break |
|
||||
} |
|
||||
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg { |
|
||||
m.ArchiveFiles = append(m.ArchiveFiles, hdr.Name) |
|
||||
} |
|
||||
} |
|
||||
sort.Strings(m.ArchiveFiles) |
|
||||
} else if m.Mimetype == "application/x-gzip" { |
|
||||
gzf, err := gzip.NewReader(file) |
|
||||
if err == nil { |
|
||||
tReadr := tar.NewReader(gzf) |
|
||||
for { |
|
||||
hdr, err := tReadr.Next() |
|
||||
if err == io.EOF || err != nil { |
|
||||
break |
|
||||
} |
|
||||
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg { |
|
||||
m.ArchiveFiles = append(m.ArchiveFiles, hdr.Name) |
|
||||
} |
|
||||
} |
|
||||
sort.Strings(m.ArchiveFiles) |
|
||||
} |
|
||||
} else if m.Mimetype == "application/x-bzip" { |
|
||||
bzf := bzip2.NewReader(file) |
|
||||
tReadr := tar.NewReader(bzf) |
|
||||
for { |
|
||||
hdr, err := tReadr.Next() |
|
||||
if err == io.EOF || err != nil { |
|
||||
break |
|
||||
} |
|
||||
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg { |
|
||||
m.ArchiveFiles = append(m.ArchiveFiles, hdr.Name) |
|
||||
} |
|
||||
} |
|
||||
sort.Strings(m.ArchiveFiles) |
|
||||
} else if m.Mimetype == "application/zip" { |
|
||||
zf, err := zip.NewReader(file, m.Size) |
|
||||
if err == nil { |
|
||||
for _, f := range zf.File { |
|
||||
m.ArchiveFiles = append(m.ArchiveFiles, f.Name) |
|
||||
} |
|
||||
} |
|
||||
sort.Strings(m.ArchiveFiles) |
|
||||
} |
|
||||
|
|
||||
return |
|
||||
} |
|
||||
|
|
||||
func metadataWrite(filename string, metadata *Metadata) error { |
|
||||
mjson := MetadataJSON{} |
|
||||
mjson.DeleteKey = metadata.DeleteKey |
|
||||
mjson.Mimetype = metadata.Mimetype |
|
||||
mjson.ArchiveFiles = metadata.ArchiveFiles |
|
||||
mjson.Sha256sum = metadata.Sha256sum |
|
||||
mjson.Expiry = metadata.Expiry.Unix() |
|
||||
mjson.Size = metadata.Size |
|
||||
mjson.ShortURL = metadata.ShortURL |
|
||||
|
|
||||
byt, err := json.Marshal(mjson) |
|
||||
if err != nil { |
|
||||
return err |
|
||||
} |
|
||||
|
|
||||
if _, err := metaBackend.Put(filename, bytes.NewBuffer(byt)); err != nil { |
|
||||
return err |
|
||||
} |
|
||||
|
|
||||
return nil |
|
||||
} |
|
||||
|
|
||||
func metadataRead(filename string) (metadata Metadata, err error) { |
|
||||
b, err := metaBackend.Get(filename) |
|
||||
if err != nil { |
|
||||
// Metadata does not exist, generate one
|
|
||||
newMData, err := generateMetadata(filename, neverExpire, "") |
|
||||
if err != nil { |
|
||||
return metadata, err |
|
||||
} |
|
||||
metadataWrite(filename, &newMData) |
|
||||
|
|
||||
b, err = metaBackend.Get(filename) |
|
||||
if err != nil { |
|
||||
return metadata, BadMetadata |
|
||||
} |
|
||||
} |
|
||||
|
|
||||
mjson := MetadataJSON{} |
|
||||
|
|
||||
err = json.Unmarshal(b, &mjson) |
|
||||
if err != nil { |
|
||||
return metadata, BadMetadata |
|
||||
} |
|
||||
|
|
||||
metadata.DeleteKey = mjson.DeleteKey |
|
||||
metadata.Mimetype = mjson.Mimetype |
|
||||
metadata.ArchiveFiles = mjson.ArchiveFiles |
|
||||
metadata.Sha256sum = mjson.Sha256sum |
|
||||
metadata.Expiry = time.Unix(mjson.Expiry, 0) |
|
||||
metadata.Size = mjson.Size |
|
||||
metadata.ShortURL = mjson.ShortURL |
|
||||
|
|
||||
return |
|
||||
} |
|
||||
|
|
||||
// printable reports whether data looks like plain ASCII text: it returns
// false for any byte above ASCII, and for a NUL byte at the very first
// position.
//
// NOTE(review): the original code had a second branch `r == 0 && i < 0`
// which can never be true (range indices are non-negative) and contradicted
// its own comment; only the dead branch is removed here. Interior NUL bytes
// remain tolerated, preserving the original behavior.
func printable(data []byte) bool {
	for i, b := range data {
		r := rune(b)

		// A NUL byte at the beginning marks the data as binary.
		if r == 0 {
			if i == 0 {
				return false
			}
			continue
		}

		if r > unicode.MaxASCII {
			return false
		}
	}

	return true
}
|
@ -1,89 +0,0 @@ |
|||||
package main |
|
||||
|
|
||||
import ( |
|
||||
"bytes" |
|
||||
"encoding/json" |
|
||||
"errors" |
|
||||
"net/http" |
|
||||
|
|
||||
"github.com/zenazn/goji/web" |
|
||||
) |
|
||||
|
|
||||
// shortenerRequest is the JSON request body sent to the Google URL
// Shortener API.
type shortenerRequest struct {
	LongURL string `json:"longUrl"` // the URL to be shortened
}
|
||||
|
|
||||
// shortenerResponse is the JSON response returned by the Google URL
// Shortener API. On failure the nested Error struct carries the API's
// error code and message (checked via Error.Message in shortenURL).
type shortenerResponse struct {
	Kind    string `json:"kind"`
	ID      string `json:"id"`      // the shortened URL
	LongURL string `json:"longUrl"` // echo of the URL that was shortened
	Error   struct {
		Code    int    `json:"code"`
		Message string `json:"message"`
	} `json:"error"`
}
|
||||
|
|
||||
func shortURLHandler(c web.C, w http.ResponseWriter, r *http.Request) { |
|
||||
fileName := c.URLParams["name"] |
|
||||
|
|
||||
err := checkFile(fileName) |
|
||||
if err == NotFoundErr { |
|
||||
notFoundHandler(c, w, r) |
|
||||
return |
|
||||
} |
|
||||
|
|
||||
metadata, err := metadataRead(fileName) |
|
||||
if err != nil { |
|
||||
oopsHandler(c, w, r, RespJSON, "Corrupt metadata.") |
|
||||
return |
|
||||
} |
|
||||
|
|
||||
if metadata.ShortURL == "" { |
|
||||
url, err := shortenURL(getSiteURL(r) + fileName) |
|
||||
if err != nil { |
|
||||
oopsHandler(c, w, r, RespJSON, err.Error()) |
|
||||
return |
|
||||
} |
|
||||
|
|
||||
metadata.ShortURL = url |
|
||||
|
|
||||
err = metadataWrite(fileName, &metadata) |
|
||||
if err != nil { |
|
||||
oopsHandler(c, w, r, RespJSON, "Corrupt metadata.") |
|
||||
return |
|
||||
} |
|
||||
} |
|
||||
|
|
||||
js, _ := json.Marshal(map[string]string{ |
|
||||
"shortUrl": metadata.ShortURL, |
|
||||
}) |
|
||||
w.Write(js) |
|
||||
return |
|
||||
} |
|
||||
|
|
||||
func shortenURL(url string) (string, error) { |
|
||||
apiURL := "https://www.googleapis.com/urlshortener/v1/url?key=" + Config.googleShorterAPIKey |
|
||||
jsonStr, _ := json.Marshal(shortenerRequest{LongURL: url}) |
|
||||
|
|
||||
req, err := http.NewRequest("POST", apiURL, bytes.NewBuffer(jsonStr)) |
|
||||
req.Header.Set("Content-Type", "application/json") |
|
||||
|
|
||||
client := &http.Client{} |
|
||||
resp, err := client.Do(req) |
|
||||
if err != nil { |
|
||||
return "", err |
|
||||
} |
|
||||
defer resp.Body.Close() |
|
||||
|
|
||||
shortenerResponse := new(shortenerResponse) |
|
||||
err = json.NewDecoder(resp.Body).Decode(shortenerResponse) |
|
||||
if err != nil { |
|
||||
return "", err |
|
||||
} |
|
||||
|
|
||||
if shortenerResponse.Error.Message != "" { |
|
||||
return "", errors.New(shortenerResponse.Error.Message) |
|
||||
} |
|
||||
|
|
||||
return shortenerResponse.ID, nil |
|
||||
} |
|
@ -1,39 +0,0 @@ |
|||||
// Fetch a short URL for the current file (once) and swap it into the link;
// subsequent clicks are handled by the clipboard integration.
document.getElementById('shorturl').addEventListener('click', function (e) {
    e.preventDefault();

    // Already shortened: nothing to do here.
    if (e.target.href !== "") return;

    // "var" added: xhr was previously assigned without a declaration,
    // leaking an implicit global.
    var xhr = new XMLHttpRequest();
    xhr.open("GET", e.target.dataset.url, true);
    xhr.setRequestHeader('Accept', 'application/json');
    xhr.onreadystatechange = function () {
        if (xhr.readyState === 4) {
            var resp = JSON.parse(xhr.responseText);

            if (xhr.status === 200 && resp.error == null) {
                e.target.innerText = resp.shortUrl;
                e.target.href = resp.shortUrl;
                e.target.setAttribute('aria-label', 'Click to copy into clipboard')
            } else {
                e.target.setAttribute('aria-label', resp.error)
            }
        }
    };
    xhr.send();
});
|
||||
|
|
||||
// Wire up copy-to-clipboard on the short URL link; the copied text is the
// link's href (only present once a short URL has been generated).
var clipboard = new Clipboard("#shorturl", {
    text: function (trigger) {
        if (trigger.href == null) return;

        return trigger.href;
    }
});

clipboard.on('success', function (e) {
    e.trigger.setAttribute('aria-label', 'Successfully copied')
});

clipboard.on('error', function (e) {
    // Fixed user-facing typo: "coping" -> "copying".
    e.trigger.setAttribute('aria-label', 'Your browser does not support copying to clipboard')
});
|
@ -1,5 +1,7 @@ |
|||||
{% extends "base.html" %} |
{% extends "base.html" %} |
||||
|
|
||||
{% block content %} |
{% block content %} |
||||
<a href="{{ sitepath }}"><img src='{{ sitepath }}static/images/404.jpg'></a> |
|
||||
|
<div class="error-404"> |
||||
|
<a href="{{ sitepath }}"><img src='{{ sitepath }}static/images/404.jpg'></a> |
||||
|
</div> |
||||
{% endblock %} |
{% endblock %} |
@ -1,7 +1,7 @@ |
|||||
{% extends "base.html" %} |
{% extends "base.html" %} |
||||
|
|
||||
{% block main %} |
{% block main %} |
||||
<a href="{{ sitepath }}selif/{{ filename }}"> |
|
||||
<img class="display-image" src="{{ sitepath }}selif/{{ filename }}" /> |
|
||||
|
<a href="{{ sitepath }}{{ selifpath }}{{ filename }}"> |
||||
|
<img class="display-image" src="{{ sitepath }}{{ selifpath }}{{ filename }}" /> |
||||
</a> |
</a> |
||||
{% endblock %} |
{% endblock %} |
@ -1,10 +1,10 @@ |
|||||
{% extends "base.html" %} |
{% extends "base.html" %} |
||||
|
|
||||
{% block main %} |
{% block main %} |
||||
<object class="display-pdf" data="{{ sitepath }}selif/{{ filename }}" type="application/pdf"> |
|
||||
|
<object class="display-pdf" data="{{ sitepath }}{{ selifpath }}{{ filename }}" type="application/pdf"> |
||||
|
|
||||
<p>It appears your Web browser is not configured to display PDF files. |
<p>It appears your Web browser is not configured to display PDF files. |
||||
No worries, just <a href="{{ sitepath }}selif/{{ filename }}">click here to download the PDF file.</a></p> |
|
||||
|
No worries, just <a href="{{ sitepath }}{{ selifpath }}{{ filename }}">click here to download the PDF file.</a></p> |
||||
|
|
||||
</object> |
</object> |
||||
{% endblock %} |
{% endblock %} |
@ -0,0 +1,28 @@ |
|||||
|
package torrent |
||||
|
|
||||
|
import ( |
||||
|
"crypto/sha1" |
||||
|
) |
||||
|
|
||||
|
const (
	// TORRENT_PIECE_LENGTH is the piece size, in bytes, used when
	// building torrents (256 KiB).
	// NOTE(review): Go naming convention would prefer TorrentPieceLength,
	// but renaming this exported constant would break callers.
	TORRENT_PIECE_LENGTH = 262144
)
||||
|
|
||||
|
// TorrentInfo is the bencoded "info" dictionary of a single-file torrent.
type TorrentInfo struct {
	PieceLength int    `bencode:"piece length"` // bytes per piece
	Pieces      string `bencode:"pieces"`       // concatenated per-piece hashes (20-byte SHA-1 each, per HashPiece)
	Name        string `bencode:"name"`         // suggested file name
	Length      int    `bencode:"length"`       // total file length in bytes
}
||||
|
|
||||
|
// Torrent is the top-level structure of a bencoded torrent file.
type Torrent struct {
	Encoding string      `bencode:"encoding"`
	Info     TorrentInfo `bencode:"info"`
	UrlList  []string    `bencode:"url-list"` // web seed URLs -- presumably BEP 19; confirm against generator
}
||||
|
|
||||
|
// HashPiece returns the 20-byte SHA-1 digest of piece as a byte slice.
func HashPiece(piece []byte) []byte {
	digest := sha1.Sum(piece)
	return digest[:]
}
Write
Preview
Loading…
Cancel
Save
Reference in new issue