51 changed files with 1755 additions and 1032 deletions
Changed files (lines changed):

- .gitignore (1)
- .travis.yml (4)
- Dockerfile (19)
- README.md (37)
- backends/localfs/localfs.go (155)
- backends/meta.go (6)
- backends/metajson/metajson.go (73)
- backends/s3/s3.go (210)
- backends/storage.go (21)
- build.sh (141)
- csp.go (11)
- csp_test.go (12)
- delete.go (19)
- display.go (69)
- expiry.go (2)
- fileserve.go (61)
- helpers/archive.go (70)
- helpers/helpers.go (83)
- helpers/helpers_test.go (29)
- httputil/LICENSE (27)
- httputil/conditional.go (218)
- linx-cleanup/cleanup.go (10)
- meta.go (165)
- pages.go (46)
- server.go (68)
- server_test.go (206)
- shorturl.go (89)
- static/css/dropzone.css (10)
- static/css/github-markdown.css (3)
- static/css/linx.css (211)
- static/js/bin.js (116)
- static/js/shorturl.js (39)
- static/js/upload.js (171)
- templates.go (1)
- templates/404.html (4)
- templates/API.html (16)
- templates/base.html (3)
- templates/display/audio.html (4)
- templates/display/base.html (22)
- templates/display/bin.html (37)
- templates/display/file.html (2)
- templates/display/image.html (4)
- templates/display/pdf.html (4)
- templates/display/story.html (34)
- templates/display/video.html (4)
- templates/index.html (6)
- templates/paste.html (20)
- torrent.go (64)
- torrent/torrent.go (28)
- torrent_test.go (7)
- upload.go (125)
.travis.yml
@@ -1,8 +1,8 @@
 language: go

 go:
-  - 1.5
-  - 1.6
+  - "1.10"
+  - "1.11"

 before_script:
   - go vet ./...
Dockerfile
@@ -1,15 +1,28 @@
-FROM golang:alpine
+FROM golang:alpine3.8 AS build

 COPY . /go/src/github.com/andreimarcu/linx-server
 WORKDIR /go/src/github.com/andreimarcu/linx-server

 RUN set -ex \
     && apk add --no-cache --virtual .build-deps git \
-    && go get github.com/andreimarcu/linx-server \
+    && go get -v . \
     && apk del .build-deps

+FROM alpine:3.8
+
+COPY --from=build /go/bin/linx-server /usr/local/bin/linx-server
+
-ENV GOPATH /go
+ENV SSL_CERT_FILE /etc/ssl/cert.pem

 COPY static /go/src/github.com/andreimarcu/linx-server/static/
 COPY templates /go/src/github.com/andreimarcu/linx-server/templates/

+RUN mkdir -p /data/files && mkdir -p /data/meta && chown -R 65534:65534 /data
+
+VOLUME ["/data/files", "/data/meta"]
+
 EXPOSE 8080
+USER nobody
-ENTRYPOINT ["/go/bin/linx-server", "-bind=0.0.0.0:8080", "-filespath=/data/files/", "-metapath=/data/meta/"]
+ENTRYPOINT ["/usr/local/bin/linx-server", "-bind=0.0.0.0:8080", "-filespath=/data/files/", "-metapath=/data/meta/"]
+CMD ["-sitename=linx", "-allowhotlink"]
backends/metajson/metajson.go (deleted)
@@ -1,73 +0,0 @@
-package metajson
-
-import (
-	"bytes"
-	"encoding/json"
-	"time"
-
-	"github.com/andreimarcu/linx-server/backends"
-)
-
-type MetadataJSON struct {
-	DeleteKey    string   `json:"delete_key"`
-	Sha256sum    string   `json:"sha256sum"`
-	Mimetype     string   `json:"mimetype"`
-	Size         int64    `json:"size"`
-	Expiry       int64    `json:"expiry"`
-	ArchiveFiles []string `json:"archive_files,omitempty"`
-	ShortURL     string   `json:"short_url"`
-}
-
-type MetaJSONBackend struct {
-	storage backends.MetaStorageBackend
-}
-
-func (m MetaJSONBackend) Put(key string, metadata *backends.Metadata) error {
-	mjson := MetadataJSON{}
-	mjson.DeleteKey = metadata.DeleteKey
-	mjson.Mimetype = metadata.Mimetype
-	mjson.ArchiveFiles = metadata.ArchiveFiles
-	mjson.Sha256sum = metadata.Sha256sum
-	mjson.Expiry = metadata.Expiry.Unix()
-	mjson.Size = metadata.Size
-	mjson.ShortURL = metadata.ShortURL
-
-	byt, err := json.Marshal(mjson)
-	if err != nil {
-		return err
-	}
-
-	if _, err := m.storage.Put(key, bytes.NewBuffer(byt)); err != nil {
-		return err
-	}
-
-	return nil
-}
-
-func (m MetaJSONBackend) Get(key string) (metadata backends.Metadata, err error) {
-	b, err := m.storage.Get(key)
-	if err != nil {
-		return metadata, backends.BadMetadata
-	}
-
-	mjson := MetadataJSON{}
-
-	err = json.Unmarshal(b, &mjson)
-	if err != nil {
-		return metadata, backends.BadMetadata
-	}
-
-	metadata.DeleteKey = mjson.DeleteKey
-	metadata.Mimetype = mjson.Mimetype
-	metadata.ArchiveFiles = mjson.ArchiveFiles
-	metadata.Sha256sum = mjson.Sha256sum
-	metadata.Expiry = time.Unix(mjson.Expiry, 0)
-	metadata.Size = mjson.Size
-	metadata.ShortURL = mjson.ShortURL
-
-	return
-}
-
-func NewMetaJSONBackend(storage backends.MetaStorageBackend) MetaJSONBackend {
-	return MetaJSONBackend{storage: storage}
-}
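
For reference, a sketch (not part of the diff) of the on-disk JSON document the removed metajson backend produced, derived from the struct tags above; the struct copy and all values here are made up for illustration only:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Mirror of the removed MetadataJSON struct, reproduced here only to
// show the JSON format this change migrates away from.
type metadataJSON struct {
	DeleteKey    string   `json:"delete_key"`
	Sha256sum    string   `json:"sha256sum"`
	Mimetype     string   `json:"mimetype"`
	Size         int64    `json:"size"`
	Expiry       int64    `json:"expiry"`
	ArchiveFiles []string `json:"archive_files,omitempty"`
	ShortURL     string   `json:"short_url"`
}

func main() {
	// Made-up values; Expiry is a Unix timestamp, 0 meaning "never".
	out, _ := json.MarshalIndent(metadataJSON{
		DeleteKey: "abcdef",
		Sha256sum: "966152d2",
		Mimetype:  "text/plain",
		Size:      23,
		Expiry:    0,
	}, "", "  ")
	fmt.Println(string(out))
}
```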
backends/s3/s3.go (new)
@@ -0,0 +1,210 @@
+package s3
+
+import (
+	"io"
+	"io/ioutil"
+	"os"
+	"strconv"
+	"time"
+
+	"github.com/andreimarcu/linx-server/backends"
+	"github.com/andreimarcu/linx-server/helpers"
+	"github.com/aws/aws-sdk-go/aws"
+	"github.com/aws/aws-sdk-go/aws/awserr"
+	"github.com/aws/aws-sdk-go/aws/session"
+	"github.com/aws/aws-sdk-go/service/s3"
+	"github.com/aws/aws-sdk-go/service/s3/s3manager"
+)
+
+type S3Backend struct {
+	bucket string
+	svc    *s3.S3
+}
+
+func (b S3Backend) Delete(key string) error {
+	_, err := b.svc.DeleteObject(&s3.DeleteObjectInput{
+		Bucket: aws.String(b.bucket),
+		Key:    aws.String(key),
+	})
+	if err != nil {
+		return err
+	}
+	return nil
+}
+
+func (b S3Backend) Exists(key string) (bool, error) {
+	_, err := b.svc.HeadObject(&s3.HeadObjectInput{
+		Bucket: aws.String(b.bucket),
+		Key:    aws.String(key),
+	})
+	return err == nil, err
+}
+
+func (b S3Backend) Head(key string) (metadata backends.Metadata, err error) {
+	var result *s3.HeadObjectOutput
+	result, err = b.svc.HeadObject(&s3.HeadObjectInput{
+		Bucket: aws.String(b.bucket),
+		Key:    aws.String(key),
+	})
+	if err != nil {
+		if aerr, ok := err.(awserr.Error); ok {
+			if aerr.Code() == s3.ErrCodeNoSuchKey || aerr.Code() == "NotFound" {
+				err = backends.NotFoundErr
+			}
+		}
+		return
+	}
+
+	metadata, err = unmapMetadata(result.Metadata)
+	return
+}
+
+func (b S3Backend) Get(key string) (metadata backends.Metadata, r io.ReadCloser, err error) {
+	var result *s3.GetObjectOutput
+	result, err = b.svc.GetObject(&s3.GetObjectInput{
+		Bucket: aws.String(b.bucket),
+		Key:    aws.String(key),
+	})
+	if err != nil {
+		if aerr, ok := err.(awserr.Error); ok {
+			if aerr.Code() == s3.ErrCodeNoSuchKey || aerr.Code() == "NotFound" {
+				err = backends.NotFoundErr
+			}
+		}
+		return
+	}
+
+	metadata, err = unmapMetadata(result.Metadata)
+	r = result.Body
+	return
+}
+
+func mapMetadata(m backends.Metadata) map[string]*string {
+	return map[string]*string{
+		"Expiry":     aws.String(strconv.FormatInt(m.Expiry.Unix(), 10)),
+		"Delete_key": aws.String(m.DeleteKey),
+		"Size":       aws.String(strconv.FormatInt(m.Size, 10)),
+		"Mimetype":   aws.String(m.Mimetype),
+		"Sha256sum":  aws.String(m.Sha256sum),
+	}
+}
+
+func unmapMetadata(input map[string]*string) (m backends.Metadata, err error) {
+	expiry, err := strconv.ParseInt(aws.StringValue(input["Expiry"]), 10, 64)
+	if err != nil {
+		return m, err
+	}
+	m.Expiry = time.Unix(expiry, 0)
+
+	m.Size, err = strconv.ParseInt(aws.StringValue(input["Size"]), 10, 64)
+	if err != nil {
+		return
+	}
+
+	m.DeleteKey = aws.StringValue(input["Delete_key"])
+	m.Mimetype = aws.StringValue(input["Mimetype"])
+	m.Sha256sum = aws.StringValue(input["Sha256sum"])
+	return
+}
+
+func (b S3Backend) Put(key string, r io.Reader, expiry time.Time, deleteKey string) (m backends.Metadata, err error) {
+	tmpDst, err := ioutil.TempFile("", "linx-server-upload")
+	if err != nil {
+		return m, err
+	}
+	defer tmpDst.Close()
+	defer os.Remove(tmpDst.Name())
+
+	bytes, err := io.Copy(tmpDst, r)
+	if bytes == 0 {
+		return m, backends.FileEmptyError
+	} else if err != nil {
+		return m, err
+	}
+
+	m, err = helpers.GenerateMetadata(r)
+	if err != nil {
+		return
+	}
+	m.Expiry = expiry
+	m.DeleteKey = deleteKey
+	// XXX: we may not be able to write this to AWS easily
+	//m.ArchiveFiles, _ = helpers.ListArchiveFiles(m.Mimetype, m.Size, tmpDst)
+
+	uploader := s3manager.NewUploaderWithClient(b.svc)
+	input := &s3manager.UploadInput{
+		Bucket:   aws.String(b.bucket),
+		Key:      aws.String(key),
+		Body:     tmpDst,
+		Metadata: mapMetadata(m),
+	}
+	_, err = uploader.Upload(input)
+	if err != nil {
+		return
+	}
+
+	return
+}
+
+func (b S3Backend) PutMetadata(key string, m backends.Metadata) (err error) {
+	_, err = b.svc.CopyObject(&s3.CopyObjectInput{
+		Bucket:            aws.String(b.bucket),
+		Key:               aws.String(key),
+		CopySource:        aws.String("/" + b.bucket + "/" + key),
+		Metadata:          mapMetadata(m),
+		MetadataDirective: aws.String("REPLACE"),
+	})
+	if err != nil {
+		return
+	}
+
+	return
+}
+
+func (b S3Backend) Size(key string) (int64, error) {
+	input := &s3.HeadObjectInput{
+		Bucket: aws.String(b.bucket),
+		Key:    aws.String(key),
+	}
+	result, err := b.svc.HeadObject(input)
+	if err != nil {
+		return 0, err
+	}
+
+	return *result.ContentLength, nil
+}
+
+func (b S3Backend) List() ([]string, error) {
+	var output []string
+	input := &s3.ListObjectsInput{
+		Bucket: aws.String(b.bucket),
+	}
+
+	results, err := b.svc.ListObjects(input)
+	if err != nil {
+		return nil, err
+	}
+
+	for _, object := range results.Contents {
+		output = append(output, *object.Key)
+	}
+
+	return output, nil
+}
+
+func NewS3Backend(bucket string, region string, endpoint string, forcePathStyle bool) S3Backend {
+	awsConfig := &aws.Config{}
+	if region != "" {
+		awsConfig.Region = aws.String(region)
+	}
+	if endpoint != "" {
+		awsConfig.Endpoint = aws.String(endpoint)
+	}
+	if forcePathStyle == true {
+		awsConfig.S3ForcePathStyle = aws.Bool(true)
+	}
+
+	sess := session.Must(session.NewSession(awsConfig))
+	svc := s3.New(sess)
+	return S3Backend{bucket: bucket, svc: svc}
+}
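
Not part of the diff: a minimal usage sketch of the new S3 backend. The bucket and region are made-up values; the endpoint and forcePathStyle parameters are for S3-compatible stores (e.g. MinIO), and credentials come from the usual AWS SDK sources (environment, shared config, IAM role).

```go
package main

import (
	"fmt"
	"log"

	"github.com/andreimarcu/linx-server/backends/s3"
)

func main() {
	// Hypothetical bucket/region; pass an endpoint and
	// forcePathStyle=true for a non-AWS S3-compatible store.
	backend := s3.NewS3Backend("linx-uploads", "us-east-1", "", false)

	// Enumerate stored uploads and report their sizes.
	keys, err := backend.List()
	if err != nil {
		log.Fatal(err)
	}
	for _, key := range keys {
		size, err := backend.Size(key)
		if err != nil {
			continue
		}
		fmt.Printf("%s: %d bytes\n", key, size)
	}
}
```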
build.sh
@@ -1,94 +1,67 @@
 #!/bin/bash

-version="$1"
-mkdir -p "binairies/""$version"
-name="binairies/""$version""/linx-server-v""$version""_"
-
-GOOS=darwin GOARCH=amd64 go build -o "$name"osx-amd64
-rice append --exec "$name"osx-amd64
-
-GOOS=darwin GOARCH=386 go build -o "$name"osx-386
-rice append --exec "$name"osx-386
-
-GOOS=freebsd GOARCH=amd64 go build -o "$name"freebsd-amd64
-rice append --exec "$name"freebsd-amd64
-
-GOOS=freebsd GOARCH=386 go build -o "$name"freebsd-386
-rice append --exec "$name"freebsd-386
-
-GOOS=openbsd GOARCH=amd64 go build -o "$name"openbsd-amd64
-rice append --exec "$name"openbsd-amd64
-
-GOOS=openbsd GOARCH=386 go build -o "$name"openbsd-386
-rice append --exec "$name"openbsd-386
-
-GOOS=linux GOARCH=arm go build -o "$name"linux-arm
-rice append --exec "$name"linux-arm
-
-GOOS=linux GOARCH=amd64 go build -o "$name"linux-amd64
-rice append --exec "$name"linux-amd64
-
-GOOS=linux GOARCH=386 go build -o "$name"linux-386
-rice append --exec "$name"linux-386
+function build_binary_rice {
+    name="$1"
+
+    for arch in amd64 386; do
+        GOOS=darwin GOARCH=$arch go build -o "$name"osx-$arch
+        rice append --exec "$name"osx-$arch
+    done
+
+    for arch in amd64 386; do
+        GOOS=freebsd GOARCH=$arch go build -o "$name"freebsd-$arch
+        rice append --exec "$name"freebsd-$arch
+    done
+
+    for arch in amd64 386; do
+        GOOS=openbsd GOARCH=$arch go build -o "$name"openbsd-$arch
+        rice append --exec "$name"openbsd-$arch
+    done
+
+    for arch in arm arm64 amd64 386; do
+        GOOS=linux GOARCH=$arch go build -o "$name"linux-$arch
+        rice append --exec "$name"linux-$arch
+    done
+
+    for arch in amd64 386; do
+        GOOS=windows GOARCH=$arch go build -o "$name"windows-$arch.exe
+        rice append --exec "$name"windows-$arch.exe
+    done
+}
+
+function build_binary {
+    name="$1"
+
+    for arch in amd64 386; do
+        GOOS=darwin GOARCH=$arch go build -o "$name"osx-$arch
+    done
+
+    for arch in amd64 386; do
+        GOOS=freebsd GOARCH=$arch go build -o "$name"freebsd-$arch
+    done
+
+    for arch in amd64 386; do
+        GOOS=openbsd GOARCH=$arch go build -o "$name"openbsd-$arch
+    done
+
+    for arch in arm arm64 amd64 386; do
+        GOOS=linux GOARCH=$arch go build -o "$name"linux-$arch
+    done
+
+    for arch in amd64 386; do
+        GOOS=windows GOARCH=$arch go build -o "$name"windows-$arch.exe
+    done
+}

-GOOS=windows GOARCH=amd64 go build -o "$name"windows-amd64.exe
-rice append --exec "$name"windows-amd64.exe
-
-GOOS=windows GOARCH=386 go build -o "$name"windows-386.exe
-rice append --exec "$name"windows-386.exe
+version="$1"
+mkdir -p "binaries/""$version"
+
+build_binary_rice "binaries/""$version""/linx-server-v""$version""_"

 cd linx-genkey
-name="../binairies/""$version""/linx-genkey-v""$version""_"
-
-GOOS=darwin GOARCH=amd64 go build -o "$name"osx-amd64
-
-GOOS=darwin GOARCH=386 go build -o "$name"osx-386
-
-GOOS=freebsd GOARCH=amd64 go build -o "$name"freebsd-amd64
-
-GOOS=freebsd GOARCH=386 go build -o "$name"freebsd-386
-
-GOOS=openbsd GOARCH=amd64 go build -o "$name"openbsd-amd64
-
-GOOS=openbsd GOARCH=386 go build -o "$name"openbsd-386
-
-GOOS=linux GOARCH=arm go build -o "$name"linux-arm
-
-GOOS=linux GOARCH=amd64 go build -o "$name"linux-amd64
-
-GOOS=linux GOARCH=386 go build -o "$name"linux-386
-
-GOOS=windows GOARCH=amd64 go build -o "$name"windows-amd64.exe
-
-GOOS=windows GOARCH=386 go build -o "$name"windows-386.exe
+build_binary "../binaries/""$version""/linx-genkey-v""$version""_"
 cd ..


 cd linx-cleanup
-name="../binairies/""$version""/linx-cleanup-v""$version""_"
-
-GOOS=darwin GOARCH=amd64 go build -o "$name"osx-amd64
-
-GOOS=darwin GOARCH=386 go build -o "$name"osx-386
-
-GOOS=freebsd GOARCH=amd64 go build -o "$name"freebsd-amd64
-
-GOOS=freebsd GOARCH=386 go build -o "$name"freebsd-386
-
-GOOS=openbsd GOARCH=amd64 go build -o "$name"openbsd-amd64
-
-GOOS=openbsd GOARCH=386 go build -o "$name"openbsd-386
-
-GOOS=linux GOARCH=arm go build -o "$name"linux-arm
-
-GOOS=linux GOARCH=amd64 go build -o "$name"linux-amd64
-
-GOOS=linux GOARCH=386 go build -o "$name"linux-386
-
-GOOS=windows GOARCH=amd64 go build -o "$name"windows-amd64.exe
-
-GOOS=windows GOARCH=386 go build -o "$name"windows-386.exe
+build_binary "../binaries/""$version""/linx-cleanup-v""$version""_"
 cd ..
helpers/archive.go (new)
@@ -0,0 +1,70 @@
+package helpers
+
+import (
+	"archive/tar"
+	"archive/zip"
+	"compress/bzip2"
+	"compress/gzip"
+	"io"
+	"sort"
+)
+
+type ReadSeekerAt interface {
+	io.Reader
+	io.Seeker
+	io.ReaderAt
+}
+
+func ListArchiveFiles(mimetype string, size int64, r ReadSeekerAt) (files []string, err error) {
+	if mimetype == "application/x-tar" {
+		tReadr := tar.NewReader(r)
+		for {
+			hdr, err := tReadr.Next()
+			if err == io.EOF || err != nil {
+				break
+			}
+			if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg {
+				files = append(files, hdr.Name)
+			}
+		}
+		sort.Strings(files)
+	} else if mimetype == "application/x-gzip" {
+		gzf, err := gzip.NewReader(r)
+		if err == nil {
+			tReadr := tar.NewReader(gzf)
+			for {
+				hdr, err := tReadr.Next()
+				if err == io.EOF || err != nil {
+					break
+				}
+				if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg {
+					files = append(files, hdr.Name)
+				}
+			}
+			sort.Strings(files)
+		}
+	} else if mimetype == "application/x-bzip" {
+		bzf := bzip2.NewReader(r)
+		tReadr := tar.NewReader(bzf)
+		for {
+			hdr, err := tReadr.Next()
+			if err == io.EOF || err != nil {
+				break
+			}
+			if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg {
+				files = append(files, hdr.Name)
+			}
+		}
+		sort.Strings(files)
+	} else if mimetype == "application/zip" {
+		zf, err := zip.NewReader(r, size)
+		if err == nil {
+			for _, f := range zf.File {
+				files = append(files, f.Name)
+			}
+		}
+		sort.Strings(files)
+	}
+
+	return
+}
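
A hedged sketch (not in the diff) of driving the new helper: build a one-file tar archive in memory and list its contents. *bytes.Reader implements Read, Seek, and ReadAt, so it satisfies the ReadSeekerAt interface above.

```go
package main

import (
	"archive/tar"
	"bytes"
	"fmt"
	"log"

	"github.com/andreimarcu/linx-server/helpers"
)

func main() {
	// Build a one-file tar archive in memory.
	var buf bytes.Buffer
	tw := tar.NewWriter(&buf)
	body := []byte("hello")
	tw.WriteHeader(&tar.Header{
		Name:     "hello.txt",
		Mode:     0600,
		Size:     int64(len(body)),
		Typeflag: tar.TypeReg,
	})
	tw.Write(body)
	tw.Close()

	// *bytes.Reader satisfies helpers.ReadSeekerAt.
	r := bytes.NewReader(buf.Bytes())
	files, err := helpers.ListArchiveFiles("application/x-tar", r.Size(), r)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(files) // [hello.txt]
}
```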
helpers/helpers.go (new)
@@ -0,0 +1,83 @@
+package helpers
+
+import (
+	"bytes"
+	"encoding/hex"
+	"io"
+	"unicode"
+
+	"github.com/andreimarcu/linx-server/backends"
+	"github.com/minio/sha256-simd"
+	"gopkg.in/h2non/filetype.v1"
+)
+
+func GenerateMetadata(r io.Reader) (m backends.Metadata, err error) {
+	// Since we don't have the ability to seek within a file, we can use a
+	// Buffer in combination with a TeeReader to keep a copy of the bytes
+	// we read when detecting the file type. These bytes are still needed
+	// to hash the file and determine its size and cannot be discarded.
+	var buf bytes.Buffer
+	teeReader := io.TeeReader(r, &buf)
+
+	// Get first 512 bytes for mimetype detection
+	header := make([]byte, 512)
+	_, err = teeReader.Read(header)
+	if err != nil {
+		return
+	}
+
+	// Create a Hash and a MultiReader that includes the Buffer we created
+	// above along with the original Reader, which will have the rest of
+	// the file.
+	hasher := sha256.New()
+	multiReader := io.MultiReader(&buf, r)
+
+	// Copy everything into the Hash, then use the number of bytes written
+	// as the file size.
+	var readLen int64
+	readLen, err = io.Copy(hasher, multiReader)
+	if err != nil {
+		return
+	} else {
+		m.Size += readLen
+	}
+
+	// Get the hex-encoded string version of the Hash checksum
+	m.Sha256sum = hex.EncodeToString(hasher.Sum(nil))
+
+	// Use the bytes we extracted earlier and attempt to determine the file
+	// type
+	kind, err := filetype.Match(header)
+	if err != nil {
+		m.Mimetype = "application/octet-stream"
+		return m, err
+	} else if kind.MIME.Value != "" {
+		m.Mimetype = kind.MIME.Value
+	} else if printable(header) {
+		m.Mimetype = "text/plain"
+	} else {
+		m.Mimetype = "application/octet-stream"
+	}
+
+	return
+}
+
+func printable(data []byte) bool {
+	for i, b := range data {
+		r := rune(b)
+
+		// A null terminator that's not at the beginning of the file
+		if r == 0 && i == 0 {
+			return false
+		} else if r == 0 && i < 0 {
+			continue
+		}
+
+		if r > unicode.MaxASCII {
+			return false
+		}
+
+	}
+
+	return true
+}
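
The comments in GenerateMetadata above describe a buffer-and-replay trick for non-seekable streams. Here is that pattern in isolation (a sketch, not part of the diff): sniff the leading bytes through a TeeReader, then stitch them back in front of the unread remainder with a MultiReader.

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"io/ioutil"
	"strings"
)

func main() {
	src := strings.NewReader("some non-seekable stream content")

	// Tee everything we read into buf so it can be replayed later.
	var buf bytes.Buffer
	tee := io.TeeReader(src, &buf)

	// Sniff a small header; these bytes now also live in buf.
	header := make([]byte, 4)
	io.ReadFull(tee, header)
	fmt.Printf("sniffed: %q\n", header)

	// Replay the sniffed bytes, then continue with the untouched rest.
	full, _ := ioutil.ReadAll(io.MultiReader(&buf, src))
	fmt.Printf("full stream: %q\n", full) // identical to the original input
}
```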
helpers/helpers_test.go (new)
@@ -0,0 +1,29 @@
+package helpers
+
+import (
+	"strings"
+	"testing"
+)
+
+func TestGenerateMetadata(t *testing.T) {
+	r := strings.NewReader("This is my test content")
+	m, err := GenerateMetadata(r)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	expectedSha256sum := "966152d20a77e739716a625373ee15af16e8f4aec631a329a27da41c204b0171"
+	if m.Sha256sum != expectedSha256sum {
+		t.Fatalf("Sha256sum was %q instead of expected value of %q", m.Sha256sum, expectedSha256sum)
+	}
+
+	expectedMimetype := "text/plain"
+	if m.Mimetype != expectedMimetype {
+		t.Fatalf("Mimetype was %q instead of expected value of %q", m.Mimetype, expectedMimetype)
+	}
+
+	expectedSize := int64(23)
+	if m.Size != expectedSize {
+		t.Fatalf("Size was %d instead of expected value of %d", m.Size, expectedSize)
+	}
+}
httputil/LICENSE (new)
@@ -0,0 +1,27 @@
+Copyright (c) 2009 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+   * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+   * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+   * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
httputil/conditional.go (new)
@@ -0,0 +1,218 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// HTTP file system request handler
+
+package httputil
+
+import (
+	"net/http"
+	"net/textproto"
+	"strings"
+	"time"
+)
+
+// scanETag determines if a syntactically valid ETag is present at s. If so,
+// the ETag and remaining text after consuming ETag is returned. Otherwise,
+// it returns "", "".
+func scanETag(s string) (etag string, remain string) {
+	s = textproto.TrimString(s)
+	start := 0
+	if strings.HasPrefix(s, "W/") {
+		start = 2
+	}
+	if len(s[start:]) < 2 || s[start] != '"' {
+		return "", ""
+	}
+	// ETag is either W/"text" or "text".
+	// See RFC 7232 2.3.
+	for i := start + 1; i < len(s); i++ {
+		c := s[i]
+		switch {
+		// Character values allowed in ETags.
+		case c == 0x21 || c >= 0x23 && c <= 0x7E || c >= 0x80:
+		case c == '"':
+			return s[:i+1], s[i+1:]
+		default:
+			return "", ""
+		}
+	}
+	return "", ""
+}
+
+// etagStrongMatch reports whether a and b match using strong ETag comparison.
+// Assumes a and b are valid ETags.
+func etagStrongMatch(a, b string) bool {
+	return a == b && a != "" && a[0] == '"'
+}
+
+// etagWeakMatch reports whether a and b match using weak ETag comparison.
+// Assumes a and b are valid ETags.
+func etagWeakMatch(a, b string) bool {
+	return strings.TrimPrefix(a, "W/") == strings.TrimPrefix(b, "W/")
+}
+
+// condResult is the result of an HTTP request precondition check.
+// See https://tools.ietf.org/html/rfc7232 section 3.
+type condResult int
+
+const (
+	condNone condResult = iota
+	condTrue
+	condFalse
+)
+
+func checkIfMatch(w http.ResponseWriter, r *http.Request) condResult {
+	im := r.Header.Get("If-Match")
+	if im == "" {
+		return condNone
+	}
+	for {
+		im = textproto.TrimString(im)
+		if len(im) == 0 {
+			break
+		}
+		if im[0] == ',' {
+			im = im[1:]
+			continue
+		}
+		if im[0] == '*' {
+			return condTrue
+		}
+		etag, remain := scanETag(im)
+		if etag == "" {
+			break
+		}
+		if etagStrongMatch(etag, w.Header().Get("Etag")) {
+			return condTrue
+		}
+		im = remain
+	}
+
+	return condFalse
+}
+
+func checkIfUnmodifiedSince(r *http.Request, modtime time.Time) condResult {
+	ius := r.Header.Get("If-Unmodified-Since")
+	if ius == "" || isZeroTime(modtime) {
+		return condNone
+	}
+	if t, err := http.ParseTime(ius); err == nil {
+		// The Date-Modified header truncates sub-second precision, so
+		// use mtime < t+1s instead of mtime <= t to check for unmodified.
+		if modtime.Before(t.Add(1 * time.Second)) {
+			return condTrue
+		}
+		return condFalse
+	}
+	return condNone
+}
+
+func checkIfNoneMatch(w http.ResponseWriter, r *http.Request) condResult {
+	inm := r.Header.Get("If-None-Match")
+	if inm == "" {
+		return condNone
+	}
+	buf := inm
+	for {
+		buf = textproto.TrimString(buf)
+		if len(buf) == 0 {
+			break
+		}
+		if buf[0] == ',' {
+			buf = buf[1:]
+		}
+		if buf[0] == '*' {
+			return condFalse
+		}
+		etag, remain := scanETag(buf)
+		if etag == "" {
+			break
+		}
+		if etagWeakMatch(etag, w.Header().Get("Etag")) {
+			return condFalse
+		}
+		buf = remain
+	}
+	return condTrue
+}
+
+func checkIfModifiedSince(r *http.Request, modtime time.Time) condResult {
+	if r.Method != "GET" && r.Method != "HEAD" {
+		return condNone
+	}
+	ims := r.Header.Get("If-Modified-Since")
+	if ims == "" || isZeroTime(modtime) {
+		return condNone
+	}
+	t, err := http.ParseTime(ims)
+	if err != nil {
+		return condNone
+	}
+	// The Date-Modified header truncates sub-second precision, so
+	// use mtime < t+1s instead of mtime <= t to check for unmodified.
+	if modtime.Before(t.Add(1 * time.Second)) {
+		return condFalse
+	}
+	return condTrue
+}
+
+var unixEpochTime = time.Unix(0, 0)
+
+// isZeroTime reports whether t is obviously unspecified (either zero or Unix()=0).
+func isZeroTime(t time.Time) bool {
+	return t.IsZero() || t.Equal(unixEpochTime)
+}
+
+func setLastModified(w http.ResponseWriter, modtime time.Time) {
+	if !isZeroTime(modtime) {
+		w.Header().Set("Last-Modified", modtime.UTC().Format(http.TimeFormat))
+	}
+}
+
+func writeNotModified(w http.ResponseWriter) {
+	// RFC 7232 section 4.1:
+	// a sender SHOULD NOT generate representation metadata other than the
+	// above listed fields unless said metadata exists for the purpose of
+	// guiding cache updates (e.g., Last-Modified might be useful if the
+	// response does not have an ETag field).
+	h := w.Header()
+	delete(h, "Content-Type")
+	delete(h, "Content-Length")
+	if h.Get("Etag") != "" {
+		delete(h, "Last-Modified")
+	}
+	w.WriteHeader(http.StatusNotModified)
+}
+
+// CheckPreconditions evaluates request preconditions and reports whether a precondition
+// resulted in sending StatusNotModified or StatusPreconditionFailed.
+func CheckPreconditions(w http.ResponseWriter, r *http.Request, modtime time.Time) (done bool) {
+	// This function carefully follows RFC 7232 section 6.
+	ch := checkIfMatch(w, r)
+	if ch == condNone {
+		ch = checkIfUnmodifiedSince(r, modtime)
+	}
+	if ch == condFalse {
+		w.WriteHeader(http.StatusPreconditionFailed)
+		return true
+	}
+	switch checkIfNoneMatch(w, r) {
+	case condFalse:
+		if r.Method == "GET" || r.Method == "HEAD" {
+			writeNotModified(w)
+			return true
+		} else {
+			w.WriteHeader(http.StatusPreconditionFailed)
+			return true
+		}
+	case condNone:
+		if checkIfModifiedSince(r, modtime) == condFalse {
+			writeNotModified(w)
+			return true
+		}
+	}
+
+	return false
+}
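
A sketch (not part of the diff) of how a file-serving handler might call the exported CheckPreconditions helper; the Etag value, path, and modtime below are made up for illustration.

```go
package main

import (
	"fmt"
	"net/http"
	"time"

	"github.com/andreimarcu/linx-server/httputil"
)

func serveFile(w http.ResponseWriter, r *http.Request) {
	modtime := time.Date(2018, 11, 1, 0, 0, 0, 0, time.UTC) // hypothetical

	// Set validators first; CheckPreconditions reads the Etag header
	// back from the ResponseWriter when evaluating If-Match et al.
	w.Header().Set("Etag", `"deadbeef"`)
	w.Header().Set("Last-Modified", modtime.UTC().Format(http.TimeFormat))

	if done := httputil.CheckPreconditions(w, r, modtime); done {
		// A 304 or 412 status has already been written.
		return
	}

	fmt.Fprintln(w, "file contents go here")
}

func main() {
	http.HandleFunc("/selif/demo.txt", serveFile)
	http.ListenAndServe("127.0.0.1:8080", nil)
}
```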
meta.go (deleted)
@@ -1,165 +0,0 @@
-package main
-
-import (
-	"archive/tar"
-	"archive/zip"
-	"compress/bzip2"
-	"compress/gzip"
-	"crypto/sha256"
-	"encoding/hex"
-	"errors"
-	"io"
-	"sort"
-	"time"
-	"unicode"
-
-	"github.com/andreimarcu/linx-server/backends"
-	"github.com/andreimarcu/linx-server/expiry"
-	"github.com/dchest/uniuri"
-	"gopkg.in/h2non/filetype.v1"
-)
-
-var NotFoundErr = errors.New("File not found.")
-
-func generateMetadata(fName string, exp time.Time, delKey string) (m backends.Metadata, err error) {
-	file, err := fileBackend.Open(fName)
-	if err != nil {
-		return
-	}
-	defer file.Close()
-
-	m.Size, err = fileBackend.Size(fName)
-	if err != nil {
-		return
-	}
-
-	m.Expiry = exp
-
-	if delKey == "" {
-		m.DeleteKey = uniuri.NewLen(30)
-	} else {
-		m.DeleteKey = delKey
-	}
-
-	// Get first 512 bytes for mimetype detection
-	header := make([]byte, 512)
-	file.Read(header)
-
-	kind, err := filetype.Match(header)
-	if err != nil {
-		m.Mimetype = "application/octet-stream"
-	} else {
-		m.Mimetype = kind.MIME.Value
-	}
-
-	if m.Mimetype == "" {
-		// Check if the file seems anything like text
-		if printable(header) {
-			m.Mimetype = "text/plain"
-		} else {
-			m.Mimetype = "application/octet-stream"
-		}
-	}
-
-	// Compute the sha256sum
-	hasher := sha256.New()
-	file.Seek(0, 0)
-	_, err = io.Copy(hasher, file)
-	if err == nil {
-		m.Sha256sum = hex.EncodeToString(hasher.Sum(nil))
-	}
-	file.Seek(0, 0)
-
-	// If archive, grab list of filenames
-	if m.Mimetype == "application/x-tar" {
-		tReadr := tar.NewReader(file)
-		for {
-			hdr, err := tReadr.Next()
-			if err == io.EOF || err != nil {
-				break
-			}
-			if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg {
-				m.ArchiveFiles = append(m.ArchiveFiles, hdr.Name)
-			}
-		}
-		sort.Strings(m.ArchiveFiles)
-	} else if m.Mimetype == "application/x-gzip" {
-		gzf, err := gzip.NewReader(file)
-		if err == nil {
-			tReadr := tar.NewReader(gzf)
-			for {
-				hdr, err := tReadr.Next()
-				if err == io.EOF || err != nil {
-					break
-				}
-				if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg {
-					m.ArchiveFiles = append(m.ArchiveFiles, hdr.Name)
-				}
-			}
-			sort.Strings(m.ArchiveFiles)
-		}
-	} else if m.Mimetype == "application/x-bzip" {
-		bzf := bzip2.NewReader(file)
-		tReadr := tar.NewReader(bzf)
-		for {
-			hdr, err := tReadr.Next()
-			if err == io.EOF || err != nil {
-				break
-			}
-			if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg {
-				m.ArchiveFiles = append(m.ArchiveFiles, hdr.Name)
-			}
-		}
-		sort.Strings(m.ArchiveFiles)
-	} else if m.Mimetype == "application/zip" {
-		zf, err := zip.NewReader(file, m.Size)
-		if err == nil {
-			for _, f := range zf.File {
-				m.ArchiveFiles = append(m.ArchiveFiles, f.Name)
-			}
-		}
-		sort.Strings(m.ArchiveFiles)
-	}
-
-	return
-}
-
-func metadataWrite(filename string, metadata *backends.Metadata) error {
-	return metaBackend.Put(filename, metadata)
-}
-
-func metadataRead(filename string) (metadata backends.Metadata, err error) {
-	metadata, err = metaBackend.Get(filename)
-	if err != nil {
-		// Metadata does not exist, generate one
-		newMData, err := generateMetadata(filename, expiry.NeverExpire, "")
-		if err != nil {
-			return metadata, err
-		}
-		metadataWrite(filename, &newMData)
-
-		metadata, err = metaBackend.Get(filename)
-	}
-
-	return
-}
-
-func printable(data []byte) bool {
-	for i, b := range data {
-		r := rune(b)
-
-		// A null terminator that's not at the beginning of the file
-		if r == 0 && i == 0 {
-			return false
-		} else if r == 0 && i < 0 {
-			continue
-		}
-
-		if r > unicode.MaxASCII {
-			return false
-		}
-
-	}
-
-	return true
-}
shorturl.go (deleted)
@@ -1,89 +0,0 @@
-package main
-
-import (
-	"bytes"
-	"encoding/json"
-	"errors"
-	"net/http"
-
-	"github.com/zenazn/goji/web"
-)
-
-type shortenerRequest struct {
-	LongURL string `json:"longUrl"`
-}
-
-type shortenerResponse struct {
-	Kind    string `json:"kind"`
-	ID      string `json:"id"`
-	LongURL string `json:"longUrl"`
-	Error   struct {
-		Code    int    `json:"code"`
-		Message string `json:"message"`
-	} `json:"error"`
-}
-
-func shortURLHandler(c web.C, w http.ResponseWriter, r *http.Request) {
-	fileName := c.URLParams["name"]
-
-	err := checkFile(fileName)
-	if err == NotFoundErr {
-		notFoundHandler(c, w, r)
-		return
-	}
-
-	metadata, err := metadataRead(fileName)
-	if err != nil {
-		oopsHandler(c, w, r, RespJSON, "Corrupt metadata.")
-		return
-	}
-
-	if metadata.ShortURL == "" {
-		url, err := shortenURL(getSiteURL(r) + fileName)
-		if err != nil {
-			oopsHandler(c, w, r, RespJSON, err.Error())
-			return
-		}
-
-		metadata.ShortURL = url
-
-		err = metadataWrite(fileName, &metadata)
-		if err != nil {
-			oopsHandler(c, w, r, RespJSON, "Corrupt metadata.")
-			return
-		}
-	}
-
-	js, _ := json.Marshal(map[string]string{
-		"shortUrl": metadata.ShortURL,
-	})
-	w.Write(js)
-	return
-}
-
-func shortenURL(url string) (string, error) {
-	apiURL := "https://www.googleapis.com/urlshortener/v1/url?key=" + Config.googleShorterAPIKey
-	jsonStr, _ := json.Marshal(shortenerRequest{LongURL: url})
-
-	req, err := http.NewRequest("POST", apiURL, bytes.NewBuffer(jsonStr))
-	req.Header.Set("Content-Type", "application/json")
-
-	client := &http.Client{}
-	resp, err := client.Do(req)
-	if err != nil {
-		return "", err
-	}
-	defer resp.Body.Close()
-
-	shortenerResponse := new(shortenerResponse)
-	err = json.NewDecoder(resp.Body).Decode(shortenerResponse)
-	if err != nil {
-		return "", err
-	}
-
-	if shortenerResponse.Error.Message != "" {
-		return "", errors.New(shortenerResponse.Error.Message)
-	}
-
-	return shortenerResponse.ID, nil
-}
static/js/bin.js
@@ -1,58 +1,58 @@
-// @license magnet:?xt=urn:btih:1f739d935676111cfff4b4693e3816e664797050&dn=gpl-3.0.txt GPL-v3-or-Later
-
-var navlist = document.getElementById("info").getElementsByClassName("right")[0];
-
-init();
-
-function init() {
-    var editA = document.createElement('a');
-
-    editA.setAttribute("href", "#");
-    editA.addEventListener('click', function(ev) {
-        edit(ev);
-        return false;
-    });
-    editA.innerHTML = "edit";
-
-    var separator = document.createTextNode(" | ");
-    navlist.insertBefore(editA, navlist.firstChild);
-    navlist.insertBefore(separator, navlist.children[1]);
-
-    document.getElementById('save').addEventListener('click', paste);
-    document.getElementById('wordwrap').addEventListener('click', wrap);
-}
-
-function edit(ev) {
-    ev.preventDefault();
-
-    navlist.remove();
-    document.getElementById("filename").remove();
-    document.getElementById("editform").style.display = "block";
-
-    var normalcontent = document.getElementById("normal-content");
-    normalcontent.removeChild(document.getElementById("normal-code"));
-
-    var editordiv = document.getElementById("editor");
-    editordiv.style.display = "block";
-    editordiv.addEventListener('keydown', handleTab);
-}
-
-function paste(ev) {
-    var editordiv = document.getElementById("editor");
-    document.getElementById("newcontent").value = editordiv.value;
-    document.forms["reply"].submit();
-}
-
-function wrap(ev) {
-    if (document.getElementById("wordwrap").checked) {
-        document.getElementById("codeb").style.wordWrap = "break-word";
-        document.getElementById("codeb").style.whiteSpace = "pre-wrap";
-    }
-
-    else {
-        document.getElementById("codeb").style.wordWrap = "normal";
-        document.getElementById("codeb").style.whiteSpace = "pre";
-    }
-}
-
-// @license-end
+// @license magnet:?xt=urn:btih:1f739d935676111cfff4b4693e3816e664797050&dn=gpl-3.0.txt GPL-v3-or-Later
+
+var navlist = document.getElementById("info").getElementsByClassName("info-actions")[0];
+
+init();
+
+function init() {
+    var editA = document.createElement('a');
+
+    editA.setAttribute("href", "#");
+    editA.addEventListener('click', function(ev) {
+        edit(ev);
+        return false;
+    });
+    editA.innerHTML = "edit";
+
+    var separator = document.createTextNode(" | ");
+    navlist.insertBefore(editA, navlist.firstChild);
+    navlist.insertBefore(separator, navlist.children[1]);
+
+    document.getElementById('save').addEventListener('click', paste);
+    document.getElementById('wordwrap').addEventListener('click', wrap);
+}
+
+function edit(ev) {
+    ev.preventDefault();
+
+    navlist.remove();
+    document.getElementById("filename").remove();
+    document.getElementById("editform").style.display = "block";
+
+    var normalcontent = document.getElementById("normal-content");
+    normalcontent.removeChild(document.getElementById("normal-code"));
+
+    var editordiv = document.getElementById("inplace-editor");
+    editordiv.style.display = "block";
+    editordiv.addEventListener('keydown', handleTab);
+}
+
+function paste(ev) {
+    var editordiv = document.getElementById("inplace-editor");
+    document.getElementById("newcontent").value = editordiv.value;
+    document.forms["reply"].submit();
+}
+
+function wrap(ev) {
+    if (document.getElementById("wordwrap").checked) {
+        document.getElementById("codeb").style.wordWrap = "break-word";
+        document.getElementById("codeb").style.whiteSpace = "pre-wrap";
+    }
+
+    else {
+        document.getElementById("codeb").style.wordWrap = "normal";
+        document.getElementById("codeb").style.whiteSpace = "pre";
+    }
+}
+
+// @license-end
static/js/shorturl.js (deleted)
@@ -1,39 +0,0 @@
-document.getElementById('shorturl').addEventListener('click', function (e) {
-    e.preventDefault();
-
-    if (e.target.href !== "") return;
-
-    xhr = new XMLHttpRequest();
-    xhr.open("GET", e.target.dataset.url, true);
-    xhr.setRequestHeader('Accept', 'application/json');
-    xhr.onreadystatechange = function () {
-        if (xhr.readyState === 4) {
-            var resp = JSON.parse(xhr.responseText);
-
-            if (xhr.status === 200 && resp.error == null) {
-                e.target.innerText = resp.shortUrl;
-                e.target.href = resp.shortUrl;
-                e.target.setAttribute('aria-label', 'Click to copy into clipboard')
-            } else {
-                e.target.setAttribute('aria-label', resp.error)
-            }
-        }
-    };
-    xhr.send();
-});
-
-var clipboard = new Clipboard("#shorturl", {
-    text: function (trigger) {
-        if (trigger.href == null) return;
-
-        return trigger.href;
-    }
-});
-
-clipboard.on('success', function (e) {
-    e.trigger.setAttribute('aria-label', 'Successfully copied')
-});
-
-clipboard.on('error', function (e) {
-    e.trigger.setAttribute('aria-label', 'Your browser does not support coping to clipboard')
-});
templates/404.html
@@ -1,5 +1,7 @@
 {% extends "base.html" %}

 {% block content %}
-<a href="{{ sitepath }}"><img src='{{ sitepath }}static/images/404.jpg'></a>
+<div class="error-404">
+	<a href="{{ sitepath }}"><img src='{{ sitepath }}static/images/404.jpg'></a>
+</div>
 {% endblock %}
templates/display/image.html
@@ -1,7 +1,7 @@
 {% extends "base.html" %}

 {% block main %}
-<a href="{{ sitepath }}selif/{{ filename }}">
-	<img class="display-image" src="{{ sitepath }}selif/{{ filename }}" />
+<a href="{{ sitepath }}{{ selifpath }}{{ filename }}">
+	<img class="display-image" src="{{ sitepath }}{{ selifpath }}{{ filename }}" />
 </a>
 {% endblock %}
templates/display/pdf.html
@@ -1,10 +1,10 @@
 {% extends "base.html" %}

 {% block main %}
-<object class="display-pdf" data="{{ sitepath }}selif/{{ filename }}" type="application/pdf">
+<object class="display-pdf" data="{{ sitepath }}{{ selifpath }}{{ filename }}" type="application/pdf">

 <p>It appears your Web browser is not configured to display PDF files.
-No worries, just <a href="{{ sitepath }}selif/{{ filename }}">click here to download the PDF file.</a></p>
+No worries, just <a href="{{ sitepath }}{{ selifpath }}{{ filename }}">click here to download the PDF file.</a></p>

 </object>
 {% endblock %}
torrent/torrent.go (new)
@@ -0,0 +1,28 @@
+package torrent
+
+import (
+	"crypto/sha1"
+)
+
+const (
+	TORRENT_PIECE_LENGTH = 262144
+)
+
+type TorrentInfo struct {
+	PieceLength int    `bencode:"piece length"`
+	Pieces      string `bencode:"pieces"`
+	Name        string `bencode:"name"`
+	Length      int    `bencode:"length"`
+}
+
+type Torrent struct {
+	Encoding string      `bencode:"encoding"`
+	Info     TorrentInfo `bencode:"info"`
+	UrlList  []string    `bencode:"url-list"`
+}
+
+func HashPiece(piece []byte) []byte {
+	h := sha1.New()
+	h.Write(piece)
+	return h.Sum(nil)
+}
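
A sketch (not in the diff) of how the piece constants and HashPiece compose: split a payload into TORRENT_PIECE_LENGTH chunks and concatenate the 20-byte SHA-1 digests into Info.Pieces, which is the layout the bencode tags describe. The file name and web-seed URL are made up.

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/andreimarcu/linx-server/torrent"
)

func main() {
	// A payload slightly larger than one piece, so two pieces result.
	data := bytes.Repeat([]byte("x"), torrent.TORRENT_PIECE_LENGTH+100)

	t := torrent.Torrent{
		Encoding: "UTF-8",
		Info: torrent.TorrentInfo{
			PieceLength: torrent.TORRENT_PIECE_LENGTH,
			Name:        "demo.bin",
			Length:      len(data),
		},
		UrlList: []string{"https://example.com/selif/demo.bin"}, // hypothetical web seed
	}

	// Hash each fixed-size piece; the trailing short piece is hashed as-is.
	for len(data) > 0 {
		n := torrent.TORRENT_PIECE_LENGTH
		if len(data) < n {
			n = len(data)
		}
		t.Info.Pieces += string(torrent.HashPiece(data[:n]))
		data = data[n:]
	}

	fmt.Printf("%d pieces hashed (%d bytes of SHA-1 digests)\n",
		len(t.Info.Pieces)/20, len(t.Info.Pieces))
}
```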