
Merge pull request #1 from andreimarcu/master

Branch: pull/183/head
mrtesla007, 5 years ago (committed by GitHub)
Parent commit: 4a959cacc5
Changed files (lines changed shown in parentheses):

   1. .gitignore (1)
   2. .travis.yml (4)
   3. Dockerfile (19)
   4. README.md (37)
   5. backends/localfs/localfs.go (155)
   6. backends/meta.go (6)
   7. backends/metajson/metajson.go (73)
   8. backends/s3/s3.go (210)
   9. backends/storage.go (21)
  10. build.sh (141)
  11. csp.go (11)
  12. csp_test.go (12)
  13. delete.go (19)
  14. display.go (69)
  15. expiry.go (2)
  16. fileserve.go (61)
  17. helpers/archive.go (70)
  18. helpers/helpers.go (83)
  19. helpers/helpers_test.go (29)
  20. httputil/LICENSE (27)
  21. httputil/conditional.go (218)
  22. linx-cleanup/cleanup.go (10)
  23. meta.go (165)
  24. pages.go (46)
  25. server.go (68)
  26. server_test.go (206)
  27. shorturl.go (89)
  28. static/css/dropzone.css (10)
  29. static/css/github-markdown.css (3)
  30. static/css/linx.css (211)
  31. static/js/bin.js (116)
  32. static/js/shorturl.js (39)
  33. static/js/upload.js (171)
  34. templates.go (1)
  35. templates/404.html (4)
  36. templates/API.html (16)
  37. templates/base.html (3)
  38. templates/display/audio.html (4)
  39. templates/display/base.html (22)
  40. templates/display/bin.html (37)
  41. templates/display/file.html (2)
  42. templates/display/image.html (4)
  43. templates/display/pdf.html (4)
  44. templates/display/story.html (34)
  45. templates/display/video.html (4)
  46. templates/index.html (6)
  47. templates/paste.html (20)
  48. torrent.go (64)
  49. torrent/torrent.go (28)
  50. torrent_test.go (7)
  51. upload.go (125)

1
.gitignore

@ -29,4 +29,5 @@ _testmain.go
linx-server
files/
meta/
binaries/
linx-cleanup

4
.travis.yml

@ -1,8 +1,8 @@
language: go
go:
- 1.5
- 1.6
- "1.10"
- "1.11"
before_script:
- go vet ./...

19
Dockerfile

@ -1,15 +1,28 @@
FROM golang:alpine
FROM golang:alpine3.8 AS build
COPY . /go/src/github.com/andreimarcu/linx-server
WORKDIR /go/src/github.com/andreimarcu/linx-server
RUN set -ex \
&& apk add --no-cache --virtual .build-deps git \
&& go get github.com/andreimarcu/linx-server \
&& go get -v . \
&& apk del .build-deps
FROM alpine:3.8
COPY --from=build /go/bin/linx-server /usr/local/bin/linx-server
ENV GOPATH /go
ENV SSL_CERT_FILE /etc/ssl/cert.pem
COPY static /go/src/github.com/andreimarcu/linx-server/static/
COPY templates /go/src/github.com/andreimarcu/linx-server/templates/
RUN mkdir -p /data/files && mkdir -p /data/meta && chown -R 65534:65534 /data
VOLUME ["/data/files", "/data/meta"]
EXPOSE 8080
USER nobody
ENTRYPOINT ["/go/bin/linx-server", "-bind=0.0.0.0:8080", "-filespath=/data/files/", "-metapath=/data/meta/"]
ENTRYPOINT ["/usr/local/bin/linx-server", "-bind=0.0.0.0:8080", "-filespath=/data/files/", "-metapath=/data/meta/"]
CMD ["-sitename=linx", "-allowhotlink"]

37
README.md

@ -16,8 +16,7 @@ Self-hosted file/media sharing website.
### Screenshots
<img width="230" src="https://cloud.githubusercontent.com/assets/4650950/10530123/4211e946-7372-11e5-9cb5-9956c5c49d95.png" /> <img width="230" src="https://cloud.githubusercontent.com/assets/4650950/10530124/4217db8a-7372-11e5-957d-b3abb873dc80.png" />
<img width="230" src="https://cloud.githubusercontent.com/assets/4650950/10530844/48d6d4e2-7379-11e5-8886-d4c32c416cbc.png" /> <img width="230" src="https://cloud.githubusercontent.com/assets/4650950/10530845/48dc9ae4-7379-11e5-9e59-959f7c40a573.png" /> <img width="230" src="https://cloud.githubusercontent.com/assets/4650950/10530846/48df08ec-7379-11e5-89f6-5c3f6372384d.png" />
<img width="200" src="https://user-images.githubusercontent.com/4650950/51735725-0033cf00-203d-11e9-8a97-f543330a92ec.png" /> <img width="200" src="https://user-images.githubusercontent.com/4650950/51735724-0033cf00-203d-11e9-8fe0-77442eaa8705.png" /> <img width="200" src="https://user-images.githubusercontent.com/4650950/51735726-0033cf00-203d-11e9-9fca-095a97e46ce8.png" /> <img width="200" src="https://user-images.githubusercontent.com/4650950/51735728-0033cf00-203d-11e9-90e9-4f2d36332fc4.png" />
Get release and run
@ -41,18 +40,34 @@ allowhotlink = true
#### Options
- ```-bind 127.0.0.1:8080``` -- what to bind to (default is 127.0.0.1:8080)
- ```-sitename myLinx``` -- the site name displayed on top (default is inferred from Host header)
- ```-siteurl "http://mylinx.example.org/"``` -- the site url (default is inferred from execution context)
- ```-filespath files/``` -- Path to store uploads (default is files/)
- ```-metapath meta/``` -- Path to store information about uploads (default is meta/)
- ```-siteurl "https://mylinx.example.org/"``` -- the site url (default is inferred from execution context)
- ```-selifpath "selif"``` -- path relative to site base url (the "selif" in mylinx.example.org/selif/image.jpg) where files are accessed directly (default: selif)
- ```-maxsize 4294967296``` -- maximum upload file size in bytes (default 4GB)
- ```-maxexpiry 86400``` -- maximum expiration time in seconds (default is 0, which is no expiry)
- ```-allowhotlink``` -- Allow file hotlinking
- ```-contentsecuritypolicy "..."``` -- Content-Security-Policy header for pages (default is "default-src 'self'; img-src 'self' data:; style-src 'self' 'unsafe-inline'; frame-ancestors 'self'; referrer origin;")
- ```-filecontentsecuritypolicy "..."``` -- Content-Security-Policy header for files (default is "default-src 'none'; img-src 'self'; object-src 'self'; media-src 'self'; style-src 'self' 'unsafe-inline'; frame-ancestors 'self'; referrer origin;")
- ```-contentsecuritypolicy "..."``` -- Content-Security-Policy header for pages (default is "default-src 'self'; img-src 'self' data:; style-src 'self' 'unsafe-inline'; frame-ancestors 'self';")
- ```-filecontentsecuritypolicy "..."``` -- Content-Security-Policy header for files (default is "default-src 'none'; img-src 'self'; object-src 'self'; media-src 'self'; style-src 'self' 'unsafe-inline'; frame-ancestors 'self';")
- ```-refererpolicy "..."``` -- Referrer-Policy header for pages (default is "same-origin")
- ```-filereferrerpolicy "..."``` -- Referrer-Policy header for files (default is "same-origin")
- ```-xframeoptions "..." ``` -- X-Frame-Options header (default is "SAMEORIGIN")
- ```-remoteuploads``` -- (optionally) enable remote uploads (/upload?url=https://...)
- ```-nologs``` -- (optionally) disable request logs in stdout
- ```-googleapikey``` -- (optionally) API Key for Google's URL Shortener. ([How to create one](https://developers.google.com/url-shortener/v1/getting_started#APIKey))
- ```-force-random-filename``` -- (optionally) force the use of random filenames
#### Require API Keys for uploads
- ```-authfile path/to/authfile``` -- (optionally) require authorization for upload/delete by providing a newline-separated file of scrypted auth keys
- ```-remoteauthfile path/to/remoteauthfile``` -- (optionally) require authorization for remote uploads by providing a newline-separated file of scrypted auth keys
A helper utility ```linx-genkey``` is provided which hashes keys to the format required in the auth files.
#### Storage backends
The following storage backends are available:
|Name|Notes|Options
|----|-----|-------
|LocalFS|Enabled by default, this backend uses the filesystem|```-filespath files/``` -- Path to store uploads (default is files/)<br />```-metapath meta/``` -- Path to store information about uploads (default is meta/)|
|S3|Use with any S3-compatible provider.<br> This implementation will stream files through the linx instance (every download will request and stream the file from the S3 bucket).<br><br>For high-traffic environments, one might consider using an external caching layer such as described [in this article](https://blog.sentry.io/2017/03/01/dodging-s3-downtime-with-nginx-and-haproxy.html).|```-s3-endpoint https://...``` -- S3 endpoint<br>```-s3-region us-east-1``` -- S3 region<br>```-s3-bucket mybucket``` -- S3 bucket to use for files and metadata<br>```-s3-force-path-style``` (optional) -- force path-style addressing (e.g. https://<span></span>s3.amazonaws.com/linx/example.txt)<br><br>Environment variables to provide:<br>```AWS_ACCESS_KEY_ID``` -- the S3 access key<br>```AWS_SECRET_ACCESS_KEY``` -- the S3 secret key<br>```AWS_SESSION_TOKEN``` (optional) -- the S3 session token|
#### SSL with built-in server
- ```-certfile path/to/your.crt``` -- Path to the ssl certificate (required if you want to use the https server)
@ -64,12 +79,6 @@ allowhotlink = true
#### Use with fastcgi
- ```-fastcgi``` -- serve through fastcgi
#### Require API Keys for uploads
- ```-authfile path/to/authfile``` -- (optionally) require authorization for upload/delete by providing a newline-separated file of scrypted auth keys
- ```-remoteauthfile path/to/remoteauthfile``` -- (optionally) require authorization for remote uploads by providing a newline-separated file of scrypted auth keys
A helper utility ```linx-genkey``` is provided which hashes keys to the format required in the auth files.
Cleaning up expired files
-------------------------

155
backends/localfs/localfs.go

@ -1,63 +1,163 @@
package localfs
import (
"errors"
"encoding/json"
"io"
"io/ioutil"
"net/http"
"os"
"path"
"time"
"github.com/andreimarcu/linx-server/backends"
"github.com/andreimarcu/linx-server/helpers"
)
type LocalfsBackend struct {
basePath string
metaPath string
filesPath string
}
func (b LocalfsBackend) Delete(key string) error {
return os.Remove(path.Join(b.basePath, key))
type MetadataJSON struct {
DeleteKey string `json:"delete_key"`
Sha256sum string `json:"sha256sum"`
Mimetype string `json:"mimetype"`
Size int64 `json:"size"`
Expiry int64 `json:"expiry"`
ArchiveFiles []string `json:"archive_files,omitempty"`
}
func (b LocalfsBackend) Delete(key string) (err error) {
err = os.Remove(path.Join(b.filesPath, key))
if err != nil {
return
}
err = os.Remove(path.Join(b.metaPath, key))
return
}
func (b LocalfsBackend) Exists(key string) (bool, error) {
_, err := os.Stat(path.Join(b.basePath, key))
_, err := os.Stat(path.Join(b.filesPath, key))
return err == nil, err
}
func (b LocalfsBackend) Get(key string) ([]byte, error) {
return ioutil.ReadFile(path.Join(b.basePath, key))
func (b LocalfsBackend) Head(key string) (metadata backends.Metadata, err error) {
f, err := os.Open(path.Join(b.metaPath, key))
if os.IsNotExist(err) {
return metadata, backends.NotFoundErr
} else if err != nil {
return metadata, backends.BadMetadata
}
defer f.Close()
decoder := json.NewDecoder(f)
mjson := MetadataJSON{}
if err := decoder.Decode(&mjson); err != nil {
return metadata, backends.BadMetadata
}
metadata.DeleteKey = mjson.DeleteKey
metadata.Mimetype = mjson.Mimetype
metadata.ArchiveFiles = mjson.ArchiveFiles
metadata.Sha256sum = mjson.Sha256sum
metadata.Expiry = time.Unix(mjson.Expiry, 0)
metadata.Size = mjson.Size
return
}
func (b LocalfsBackend) Put(key string, r io.Reader) (int64, error) {
dst, err := os.Create(path.Join(b.basePath, key))
func (b LocalfsBackend) Get(key string) (metadata backends.Metadata, f io.ReadCloser, err error) {
metadata, err = b.Head(key)
if err != nil {
return 0, err
return
}
f, err = os.Open(path.Join(b.filesPath, key))
if err != nil {
return
}
return
}
func (b LocalfsBackend) writeMetadata(key string, metadata backends.Metadata) error {
metaPath := path.Join(b.metaPath, key)
mjson := MetadataJSON{
DeleteKey: metadata.DeleteKey,
Mimetype: metadata.Mimetype,
ArchiveFiles: metadata.ArchiveFiles,
Sha256sum: metadata.Sha256sum,
Expiry: metadata.Expiry.Unix(),
Size: metadata.Size,
}
dst, err := os.Create(metaPath)
if err != nil {
return err
}
defer dst.Close()
encoder := json.NewEncoder(dst)
err = encoder.Encode(mjson)
if err != nil {
os.Remove(metaPath)
return err
}
return nil
}
func (b LocalfsBackend) Put(key string, r io.Reader, expiry time.Time, deleteKey string) (m backends.Metadata, err error) {
filePath := path.Join(b.filesPath, key)
dst, err := os.Create(filePath)
if err != nil {
return
}
defer dst.Close()
bytes, err := io.Copy(dst, r)
if bytes == 0 {
b.Delete(key)
return bytes, errors.New("Empty file")
os.Remove(filePath)
return m, backends.FileEmptyError
} else if err != nil {
b.Delete(key)
return bytes, err
os.Remove(filePath)
return m, err
}
return bytes, err
}
dst.Seek(0 ,0)
m, err = helpers.GenerateMetadata(dst)
if err != nil {
os.Remove(filePath)
return
}
dst.Seek(0 ,0)
func (b LocalfsBackend) Open(key string) (backends.ReadSeekCloser, error) {
return os.Open(path.Join(b.basePath, key))
m.Expiry = expiry
m.DeleteKey = deleteKey
m.ArchiveFiles, _ = helpers.ListArchiveFiles(m.Mimetype, m.Size, dst)
err = b.writeMetadata(key, m)
if err != nil {
os.Remove(filePath)
return
}
return
}
func (b LocalfsBackend) ServeFile(key string, w http.ResponseWriter, r *http.Request) {
filePath := path.Join(b.basePath, key)
http.ServeFile(w, r, filePath)
func (b LocalfsBackend) PutMetadata(key string, m backends.Metadata) (err error) {
err = b.writeMetadata(key, m)
if err != nil {
return
}
return
}
func (b LocalfsBackend) Size(key string) (int64, error) {
fileInfo, err := os.Stat(path.Join(b.basePath, key))
fileInfo, err := os.Stat(path.Join(b.filesPath, key))
if err != nil {
return 0, err
}
@ -68,7 +168,7 @@ func (b LocalfsBackend) Size(key string) (int64, error) {
func (b LocalfsBackend) List() ([]string, error) {
var output []string
files, err := ioutil.ReadDir(b.basePath)
files, err := ioutil.ReadDir(b.filesPath)
if err != nil {
return nil, err
}
@ -80,6 +180,9 @@ func (b LocalfsBackend) List() ([]string, error) {
return output, nil
}
func NewLocalfsBackend(basePath string) LocalfsBackend {
return LocalfsBackend{basePath: basePath}
func NewLocalfsBackend(metaPath string, filesPath string) LocalfsBackend {
return LocalfsBackend{
metaPath: metaPath,
filesPath: filesPath,
}
}
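
For orientation (this sketch is not part of the diff): a minimal example of driving the reworked LocalfsBackend, with made-up paths, key, and content; the constructor and the Put/Get signatures follow the code above.

package main

import (
    "fmt"
    "io"
    "os"
    "strings"
    "time"

    "github.com/andreimarcu/linx-server/backends/localfs"
)

func main() {
    // Metadata JSON lives under meta/<key>, file contents under files/<key>.
    b := localfs.NewLocalfsBackend("meta", "files")

    // Put streams the reader to disk, derives size/mimetype/sha256sum via
    // helpers.GenerateMetadata, then writes the metadata JSON alongside it.
    expiry := time.Now().Add(1 * time.Hour)
    m, err := b.Put("example.txt", strings.NewReader("hello world"), expiry, "hypothetical-delete-key")
    if err != nil {
        fmt.Fprintln(os.Stderr, err)
        return
    }
    fmt.Println(m.Mimetype, m.Size, m.Sha256sum)

    // Head reads only the metadata; Get also opens the stored file.
    if _, r, err := b.Get("example.txt"); err == nil {
        defer r.Close()
        io.Copy(os.Stdout, r)
    }
}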

6
backends/meta.go

@ -5,11 +5,6 @@ import (
"time"
)
type MetaBackend interface {
Get(key string) (Metadata, error)
Put(key string, metadata *Metadata) error
}
type Metadata struct {
DeleteKey string
Sha256sum string
@ -17,7 +12,6 @@ type Metadata struct {
Size int64
Expiry time.Time
ArchiveFiles []string
ShortURL string
}
var BadMetadata = errors.New("Corrupted metadata.")

73
backends/metajson/metajson.go

@ -1,73 +0,0 @@
package metajson
import (
"bytes"
"encoding/json"
"time"
"github.com/andreimarcu/linx-server/backends"
)
type MetadataJSON struct {
DeleteKey string `json:"delete_key"`
Sha256sum string `json:"sha256sum"`
Mimetype string `json:"mimetype"`
Size int64 `json:"size"`
Expiry int64 `json:"expiry"`
ArchiveFiles []string `json:"archive_files,omitempty"`
ShortURL string `json:"short_url"`
}
type MetaJSONBackend struct {
storage backends.MetaStorageBackend
}
func (m MetaJSONBackend) Put(key string, metadata *backends.Metadata) error {
mjson := MetadataJSON{}
mjson.DeleteKey = metadata.DeleteKey
mjson.Mimetype = metadata.Mimetype
mjson.ArchiveFiles = metadata.ArchiveFiles
mjson.Sha256sum = metadata.Sha256sum
mjson.Expiry = metadata.Expiry.Unix()
mjson.Size = metadata.Size
mjson.ShortURL = metadata.ShortURL
byt, err := json.Marshal(mjson)
if err != nil {
return err
}
if _, err := m.storage.Put(key, bytes.NewBuffer(byt)); err != nil {
return err
}
return nil
}
func (m MetaJSONBackend) Get(key string) (metadata backends.Metadata, err error) {
b, err := m.storage.Get(key)
if err != nil {
return metadata, backends.BadMetadata
}
mjson := MetadataJSON{}
err = json.Unmarshal(b, &mjson)
if err != nil {
return metadata, backends.BadMetadata
}
metadata.DeleteKey = mjson.DeleteKey
metadata.Mimetype = mjson.Mimetype
metadata.ArchiveFiles = mjson.ArchiveFiles
metadata.Sha256sum = mjson.Sha256sum
metadata.Expiry = time.Unix(mjson.Expiry, 0)
metadata.Size = mjson.Size
metadata.ShortURL = mjson.ShortURL
return
}
func NewMetaJSONBackend(storage backends.MetaStorageBackend) MetaJSONBackend {
return MetaJSONBackend{storage: storage}
}

210
backends/s3/s3.go

@ -0,0 +1,210 @@
package s3
import (
"io"
"io/ioutil"
"os"
"strconv"
"time"
"github.com/andreimarcu/linx-server/backends"
"github.com/andreimarcu/linx-server/helpers"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/aws/aws-sdk-go/service/s3/s3manager"
)
type S3Backend struct {
bucket string
svc *s3.S3
}
func (b S3Backend) Delete(key string) error {
_, err := b.svc.DeleteObject(&s3.DeleteObjectInput{
Bucket: aws.String(b.bucket),
Key: aws.String(key),
})
if err != nil {
return err
}
return nil
}
func (b S3Backend) Exists(key string) (bool, error) {
_, err := b.svc.HeadObject(&s3.HeadObjectInput{
Bucket: aws.String(b.bucket),
Key: aws.String(key),
})
return err == nil, err
}
func (b S3Backend) Head(key string) (metadata backends.Metadata, err error) {
var result *s3.HeadObjectOutput
result, err = b.svc.HeadObject(&s3.HeadObjectInput{
Bucket: aws.String(b.bucket),
Key: aws.String(key),
})
if err != nil {
if aerr, ok := err.(awserr.Error); ok {
if aerr.Code() == s3.ErrCodeNoSuchKey || aerr.Code() == "NotFound" {
err = backends.NotFoundErr
}
}
return
}
metadata, err = unmapMetadata(result.Metadata)
return
}
func (b S3Backend) Get(key string) (metadata backends.Metadata, r io.ReadCloser, err error) {
var result *s3.GetObjectOutput
result, err = b.svc.GetObject(&s3.GetObjectInput{
Bucket: aws.String(b.bucket),
Key: aws.String(key),
})
if err != nil {
if aerr, ok := err.(awserr.Error); ok {
if aerr.Code() == s3.ErrCodeNoSuchKey || aerr.Code() == "NotFound" {
err = backends.NotFoundErr
}
}
return
}
metadata, err = unmapMetadata(result.Metadata)
r = result.Body
return
}
func mapMetadata(m backends.Metadata) map[string]*string {
return map[string]*string{
"Expiry": aws.String(strconv.FormatInt(m.Expiry.Unix(), 10)),
"Delete_key": aws.String(m.DeleteKey),
"Size": aws.String(strconv.FormatInt(m.Size, 10)),
"Mimetype": aws.String(m.Mimetype),
"Sha256sum": aws.String(m.Sha256sum),
}
}
func unmapMetadata(input map[string]*string) (m backends.Metadata, err error) {
expiry, err := strconv.ParseInt(aws.StringValue(input["Expiry"]), 10, 64)
if err != nil {
return m, err
}
m.Expiry = time.Unix(expiry, 0)
m.Size, err = strconv.ParseInt(aws.StringValue(input["Size"]), 10, 64)
if err != nil {
return
}
m.DeleteKey = aws.StringValue(input["Delete_key"])
m.Mimetype = aws.StringValue(input["Mimetype"])
m.Sha256sum = aws.StringValue(input["Sha256sum"])
return
}
func (b S3Backend) Put(key string, r io.Reader, expiry time.Time, deleteKey string) (m backends.Metadata, err error) {
tmpDst, err := ioutil.TempFile("", "linx-server-upload")
if err != nil {
return m, err
}
defer tmpDst.Close()
defer os.Remove(tmpDst.Name())
bytes, err := io.Copy(tmpDst, r)
if bytes == 0 {
return m, backends.FileEmptyError
} else if err != nil {
return m, err
}
m, err = helpers.GenerateMetadata(r)
if err != nil {
return
}
m.Expiry = expiry
m.DeleteKey = deleteKey
// XXX: we may not be able to write this to AWS easily
//m.ArchiveFiles, _ = helpers.ListArchiveFiles(m.Mimetype, m.Size, tmpDst)
uploader := s3manager.NewUploaderWithClient(b.svc)
input := &s3manager.UploadInput{
Bucket: aws.String(b.bucket),
Key: aws.String(key),
Body: tmpDst,
Metadata: mapMetadata(m),
}
_, err = uploader.Upload(input)
if err != nil {
return
}
return
}
func (b S3Backend) PutMetadata(key string, m backends.Metadata) (err error) {
_, err = b.svc.CopyObject(&s3.CopyObjectInput{
Bucket: aws.String(b.bucket),
Key: aws.String(key),
CopySource: aws.String("/" + b.bucket + "/" + key),
Metadata: mapMetadata(m),
MetadataDirective: aws.String("REPLACE"),
})
if err != nil {
return
}
return
}
func (b S3Backend) Size(key string) (int64, error) {
input := &s3.HeadObjectInput{
Bucket: aws.String(b.bucket),
Key: aws.String(key),
}
result, err := b.svc.HeadObject(input)
if err != nil {
return 0, err
}
return *result.ContentLength, nil
}
func (b S3Backend) List() ([]string, error) {
var output []string
input := &s3.ListObjectsInput{
Bucket: aws.String(b.bucket),
}
results, err := b.svc.ListObjects(input)
if err != nil {
return nil, err
}
for _, object := range results.Contents {
output = append(output, *object.Key)
}
return output, nil
}
func NewS3Backend(bucket string, region string, endpoint string, forcePathStyle bool) S3Backend {
awsConfig := &aws.Config{}
if region != "" {
awsConfig.Region = aws.String(region)
}
if endpoint != "" {
awsConfig.Endpoint = aws.String(endpoint)
}
if forcePathStyle == true {
awsConfig.S3ForcePathStyle = aws.Bool(true)
}
sess := session.Must(session.NewSession(awsConfig))
svc := s3.New(sess)
return S3Backend{bucket: bucket, svc: svc}
}
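
Because the S3 backend stores upload metadata as S3 object user metadata (see mapMetadata above), PutMetadata rewrites it by copying the object onto itself with MetadataDirective REPLACE. A hypothetical sketch (not part of the diff), assuming AWS credentials come from the usual environment variables and that the bucket, region, and key are placeholders:

package main

import (
    "log"
    "time"

    "github.com/andreimarcu/linx-server/backends/s3"
)

func main() {
    b := s3.NewS3Backend("linx-uploads", "us-east-1", "", false)

    // Read back the metadata stored on the object...
    m, err := b.Head("example.txt")
    if err != nil {
        log.Fatal(err)
    }

    // ...extend the expiry, then write it back via CopyObject + REPLACE.
    m.Expiry = time.Now().Add(24 * time.Hour)
    if err := b.PutMetadata("example.txt", m); err != nil {
        log.Fatal(err)
    }
}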

21
backends/storage.go

@ -1,24 +1,18 @@
package backends
import (
"errors"
"io"
"net/http"
"time"
)
type ReadSeekCloser interface {
io.Reader
io.Closer
io.Seeker
io.ReaderAt
}
type StorageBackend interface {
Delete(key string) error
Exists(key string) (bool, error)
Get(key string) ([]byte, error)
Put(key string, r io.Reader) (int64, error)
Open(key string) (ReadSeekCloser, error)
ServeFile(key string, w http.ResponseWriter, r *http.Request)
Head(key string) (Metadata, error)
Get(key string) (Metadata, io.ReadCloser, error)
Put(key string, r io.Reader, expiry time.Time, deleteKey string) (Metadata, error)
PutMetadata(key string, m Metadata) error
Size(key string) (int64, error)
}
@ -26,3 +20,6 @@ type MetaStorageBackend interface {
StorageBackend
List() ([]string, error)
}
var NotFoundErr = errors.New("File not found.")
var FileEmptyError = errors.New("Empty file")
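
The reworked interface also introduces the NotFoundErr and FileEmptyError sentinels (alongside the existing BadMetadata) that the handlers later in this diff match on. A small hedged sketch of the intended calling convention; the helper name and key are arbitrary:

package main

import (
    "fmt"

    "github.com/andreimarcu/linx-server/backends"
)

// describe is a hypothetical helper showing how callers are expected to
// branch on the sentinel errors returned by a backends.StorageBackend.
func describe(b backends.StorageBackend, key string) {
    m, err := b.Head(key)
    switch {
    case err == backends.NotFoundErr:
        fmt.Println(key, "does not exist")
    case err == backends.BadMetadata:
        fmt.Println(key, "has corrupt metadata")
    case err != nil:
        fmt.Println("backend error:", err)
    default:
        fmt.Printf("%s: %d bytes, %s, expires %s\n", key, m.Size, m.Mimetype, m.Expiry)
    }
}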

141
build.sh

@ -1,94 +1,67 @@
#!/bin/bash
version="$1"
mkdir -p "binairies/""$version"
name="binairies/""$version""/linx-server-v""$version""_"
GOOS=darwin GOARCH=amd64 go build -o "$name"osx-amd64
rice append --exec "$name"osx-amd64
GOOS=darwin GOARCH=386 go build -o "$name"osx-386
rice append --exec "$name"osx-386
GOOS=freebsd GOARCH=amd64 go build -o "$name"freebsd-amd64
rice append --exec "$name"freebsd-amd64
GOOS=freebsd GOARCH=386 go build -o "$name"freebsd-386
rice append --exec "$name"freebsd-386
GOOS=openbsd GOARCH=amd64 go build -o "$name"openbsd-amd64
rice append --exec "$name"openbsd-amd64
GOOS=openbsd GOARCH=386 go build -o "$name"openbsd-386
rice append --exec "$name"openbsd-386
GOOS=linux GOARCH=arm go build -o "$name"linux-arm
rice append --exec "$name"linux-arm
GOOS=linux GOARCH=amd64 go build -o "$name"linux-amd64
rice append --exec "$name"linux-amd64
GOOS=linux GOARCH=386 go build -o "$name"linux-386
rice append --exec "$name"linux-386
function build_binary_rice {
name="$1"
for arch in amd64 386; do
GOOS=darwin GOARCH=$arch go build -o "$name"osx-$arch
rice append --exec "$name"osx-$arch
done
for arch in amd64 386; do
GOOS=freebsd GOARCH=$arch go build -o "$name"freebsd-$arch
rice append --exec "$name"freebsd-$arch
done
for arch in amd64 386; do
GOOS=openbsd GOARCH=$arch go build -o "$name"openbsd-$arch
rice append --exec "$name"openbsd-$arch
done
for arch in arm arm64 amd64 386; do
GOOS=linux GOARCH=$arch go build -o "$name"linux-$arch
rice append --exec "$name"linux-$arch
done
for arch in amd64 386; do
GOOS=windows GOARCH=$arch go build -o "$name"windows-$arch.exe
rice append --exec "$name"windows-$arch.exe
done
}
function build_binary {
name="$1"
for arch in amd64 386; do
GOOS=darwin GOARCH=$arch go build -o "$name"osx-$arch
done
for arch in amd64 386; do
GOOS=freebsd GOARCH=$arch go build -o "$name"freebsd-$arch
done
for arch in amd64 386; do
GOOS=openbsd GOARCH=$arch go build -o "$name"openbsd-$arch
done
for arch in arm arm64 amd64 386; do
GOOS=linux GOARCH=$arch go build -o "$name"linux-$arch
done
for arch in amd64 386; do
GOOS=windows GOARCH=$arch go build -o "$name"windows-$arch.exe
done
}
GOOS=windows GOARCH=amd64 go build -o "$name"windows-amd64.exe
rice append --exec "$name"windows-amd64.exe
GOOS=windows GOARCH=386 go build -o "$name"windows-386.exe
rice append --exec "$name"windows-386.exe
version="$1"
mkdir -p "binaries/""$version"
build_binary_rice "binaries/""$version""/linx-server-v""$version""_"
cd linx-genkey
name="../binairies/""$version""/linx-genkey-v""$version""_"
GOOS=darwin GOARCH=amd64 go build -o "$name"osx-amd64
GOOS=darwin GOARCH=386 go build -o "$name"osx-386
GOOS=freebsd GOARCH=amd64 go build -o "$name"freebsd-amd64
GOOS=freebsd GOARCH=386 go build -o "$name"freebsd-386
GOOS=openbsd GOARCH=amd64 go build -o "$name"openbsd-amd64
GOOS=openbsd GOARCH=386 go build -o "$name"openbsd-386
GOOS=linux GOARCH=arm go build -o "$name"linux-arm
GOOS=linux GOARCH=amd64 go build -o "$name"linux-amd64
GOOS=linux GOARCH=386 go build -o "$name"linux-386
GOOS=windows GOARCH=amd64 go build -o "$name"windows-amd64.exe
GOOS=windows GOARCH=386 go build -o "$name"windows-386.exe
build_binary "../binaries/""$version""/linx-genkey-v""$version""_"
cd ..
cd linx-cleanup
name="../binairies/""$version""/linx-cleanup-v""$version""_"
GOOS=darwin GOARCH=amd64 go build -o "$name"osx-amd64
GOOS=darwin GOARCH=386 go build -o "$name"osx-386
GOOS=freebsd GOARCH=amd64 go build -o "$name"freebsd-amd64
GOOS=freebsd GOARCH=386 go build -o "$name"freebsd-386
GOOS=openbsd GOARCH=amd64 go build -o "$name"openbsd-amd64
GOOS=openbsd GOARCH=386 go build -o "$name"openbsd-386
GOOS=linux GOARCH=arm go build -o "$name"linux-arm
GOOS=linux GOARCH=amd64 go build -o "$name"linux-amd64
GOOS=linux GOARCH=386 go build -o "$name"linux-386
GOOS=windows GOARCH=amd64 go build -o "$name"windows-amd64.exe
GOOS=windows GOARCH=386 go build -o "$name"windows-386.exe
build_binary "../binaries/""$version""/linx-cleanup-v""$version""_"
cd ..

11
csp.go

@ -6,6 +6,7 @@ import (
const (
cspHeader = "Content-Security-Policy"
rpHeader = "Referrer-Policy"
frameOptionsHeader = "X-Frame-Options"
)
@ -15,8 +16,9 @@ type csp struct {
}
type CSPOptions struct {
policy string
frame string
policy string
referrerPolicy string
frame string
}
func (c csp) ServeHTTP(w http.ResponseWriter, r *http.Request) {
@ -25,6 +27,11 @@ func (c csp) ServeHTTP(w http.ResponseWriter, r *http.Request) {
w.Header().Add(cspHeader, c.opts.policy)
}
// only add a Referrer Policy if one is not already set
if existing := w.Header().Get(rpHeader); existing == "" {
w.Header().Add(rpHeader, c.opts.referrerPolicy)
}
w.Header().Set(frameOptionsHeader, c.opts.frame)
c.h.ServeHTTP(w, r)

12
csp_test.go

@ -12,6 +12,7 @@ import (
var testCSPHeaders = map[string]string{
"Content-Security-Policy": "default-src 'none'; style-src 'self';",
"Referrer-Policy": "strict-origin-when-cross-origin",
"X-Frame-Options": "SAMEORIGIN",
}
@ -22,8 +23,10 @@ func TestContentSecurityPolicy(t *testing.T) {
Config.maxSize = 1024 * 1024 * 1024
Config.noLogs = true
Config.siteName = "linx"
Config.contentSecurityPolicy = "default-src 'none'; style-src 'self';"
Config.xFrameOptions = "SAMEORIGIN"
Config.selifPath = "selif"
Config.contentSecurityPolicy = testCSPHeaders["Content-Security-Policy"]
Config.referrerPolicy = testCSPHeaders["Referrer-Policy"]
Config.xFrameOptions = testCSPHeaders["X-Frame-Options"]
mux := setup()
w := httptest.NewRecorder()
@ -34,8 +37,9 @@ func TestContentSecurityPolicy(t *testing.T) {
}
goji.Use(ContentSecurityPolicy(CSPOptions{
policy: testCSPHeaders["Content-Security-Policy"],
frame: testCSPHeaders["X-Frame-Options"],
policy: testCSPHeaders["Content-Security-Policy"],
referrerPolicy: testCSPHeaders["Referrer-Policy"],
frame: testCSPHeaders["X-Frame-Options"],
}))
mux.ServeHTTP(w, req)

19
delete.go

@ -3,8 +3,8 @@ package main
import (
"fmt"
"net/http"
"os"
"github.com/andreimarcu/linx-server/backends"
"github.com/zenazn/goji/web"
)
@ -13,24 +13,19 @@ func deleteHandler(c web.C, w http.ResponseWriter, r *http.Request) {
filename := c.URLParams["name"]
// Ensure requested file actually exists
if _, readErr := fileBackend.Exists(filename); os.IsNotExist(readErr) {
// Ensure that file exists and delete key is correct
metadata, err := storageBackend.Head(filename)
if err == backends.NotFoundErr {
notFoundHandler(c, w, r) // 404 - file doesn't exist
return
}
// Ensure delete key is correct
metadata, err := metadataRead(filename)
if err != nil {
} else if err != nil {
unauthorizedHandler(c, w, r) // 401 - no metadata available
return
}
if metadata.DeleteKey == requestKey {
fileDelErr := fileBackend.Delete(filename)
metaDelErr := metaStorageBackend.Delete(filename)
if (fileDelErr != nil) || (metaDelErr != nil) {
err := storageBackend.Delete(filename)
if err != nil {
oopsHandler(c, w, r, RespPLAIN, "Could not delete")
return
}

69
display.go

@ -2,12 +2,15 @@ package main
import (
"encoding/json"
"io/ioutil"
"net/http"
"path/filepath"
"regexp"
"strconv"
"strings"
"time"
"github.com/andreimarcu/linx-server/backends"
"github.com/andreimarcu/linx-server/expiry"
"github.com/dustin/go-humanize"
"github.com/flosch/pongo2"
@ -18,17 +21,21 @@ import (
const maxDisplayFileSizeBytes = 1024 * 512
var cliUserAgentRe = regexp.MustCompile("(?i)(lib)?curl|wget")
func fileDisplayHandler(c web.C, w http.ResponseWriter, r *http.Request) {
if !Config.noDirectAgents && cliUserAgentRe.MatchString(r.Header.Get("User-Agent")) && !strings.EqualFold("application/json", r.Header.Get("Accept")) {
fileServeHandler(c, w, r)
return
}
fileName := c.URLParams["name"]
err := checkFile(fileName)
if err == NotFoundErr {
metadata, err := checkFile(fileName)
if err == backends.NotFoundErr {
notFoundHandler(c, w, r)
return
}
metadata, err := metadataRead(fileName)
if err != nil {
} else if err != nil {
oopsHandler(c, w, r, RespAUTO, "Corrupt metadata.")
return
}
@ -44,11 +51,12 @@ func fileDisplayHandler(c web.C, w http.ResponseWriter, r *http.Request) {
if strings.EqualFold("application/json", r.Header.Get("Accept")) {
js, _ := json.Marshal(map[string]string{
"filename": fileName,
"expiry": strconv.FormatInt(metadata.Expiry.Unix(), 10),
"size": strconv.FormatInt(metadata.Size, 10),
"mimetype": metadata.Mimetype,
"sha256sum": metadata.Sha256sum,
"filename": fileName,
"direct_url": getSiteURL(r) + Config.selifPath + fileName,
"expiry": strconv.FormatInt(metadata.Expiry.Unix(), 10),
"size": strconv.FormatInt(metadata.Size, 10),
"mimetype": metadata.Mimetype,
"sha256sum": metadata.Sha256sum,
})
w.Write(js)
return
@ -69,8 +77,13 @@ func fileDisplayHandler(c web.C, w http.ResponseWriter, r *http.Request) {
tpl = Templates["display/pdf.html"]
} else if extension == "story" {
metadata, reader, err := storageBackend.Get(fileName)
if err != nil {
oopsHandler(c, w, r, RespHTML, err.Error())
}
if metadata.Size < maxDisplayFileSizeBytes {
bytes, err := fileBackend.Get(fileName)
bytes, err := ioutil.ReadAll(reader)
if err == nil {
extra["contents"] = string(bytes)
lines = strings.Split(extra["contents"], "\n")
@ -79,8 +92,13 @@ func fileDisplayHandler(c web.C, w http.ResponseWriter, r *http.Request) {
}
} else if extension == "md" {
metadata, reader, err := storageBackend.Get(fileName)
if err != nil {
oopsHandler(c, w, r, RespHTML, err.Error())
}
if metadata.Size < maxDisplayFileSizeBytes {
bytes, err := fileBackend.Get(fileName)
bytes, err := ioutil.ReadAll(reader)
if err == nil {
unsafe := blackfriday.MarkdownCommon(bytes)
html := bluemonday.UGCPolicy().SanitizeBytes(unsafe)
@ -91,8 +109,13 @@ func fileDisplayHandler(c web.C, w http.ResponseWriter, r *http.Request) {
}
} else if strings.HasPrefix(metadata.Mimetype, "text/") || supportedBinExtension(extension) {
metadata, reader, err := storageBackend.Get(fileName)
if err != nil {
oopsHandler(c, w, r, RespHTML, err.Error())
}
if metadata.Size < maxDisplayFileSizeBytes {
bytes, err := fileBackend.Get(fileName)
bytes, err := ioutil.ReadAll(reader)
if err == nil {
extra["extension"] = extension
extra["lang_hl"], extra["lang_ace"] = extensionToHlAndAceLangs(extension)
@ -108,15 +131,15 @@ func fileDisplayHandler(c web.C, w http.ResponseWriter, r *http.Request) {
}
err = renderTemplate(tpl, pongo2.Context{
"mime": metadata.Mimetype,
"filename": fileName,
"size": sizeHuman,
"expiry": expiryHuman,
"extra": extra,
"lines": lines,
"files": metadata.ArchiveFiles,
"shorturlEnabled": Config.googleShorterAPIKey != "",
"shorturl": metadata.ShortURL,
"mime": metadata.Mimetype,
"filename": fileName,
"size": sizeHuman,
"expiry": expiryHuman,
"expirylist": listExpirationTimes(),
"extra": extra,
"forcerandom": Config.forceRandomFilename,
"lines": lines,
"files": metadata.ArchiveFiles,
}, r, w)
if err != nil {

2
expiry.go

@ -24,7 +24,7 @@ type ExpirationTime struct {
// Determine if the given filename is expired
func isFileExpired(filename string) (bool, error) {
metadata, err := metadataRead(filename)
metadata, err := storageBackend.Head(filename)
if err != nil {
return false, err
}

61
fileserve.go

@ -1,22 +1,28 @@
package main
import (
"fmt"
"io"
"net/http"
"net/url"
"strconv"
"strings"
"time"
"github.com/andreimarcu/linx-server/backends"
"github.com/andreimarcu/linx-server/expiry"
"github.com/andreimarcu/linx-server/httputil"
"github.com/zenazn/goji/web"
)
func fileServeHandler(c web.C, w http.ResponseWriter, r *http.Request) {
fileName := c.URLParams["name"]
err := checkFile(fileName)
if err == NotFoundErr {
metadata, err := checkFile(fileName)
if err == backends.NotFoundErr {
notFoundHandler(c, w, r)
return
} else if err == backends.BadMetadata {
} else if err != nil {
oopsHandler(c, w, r, RespAUTO, "Corrupt metadata.")
return
}
@ -32,8 +38,30 @@ func fileServeHandler(c web.C, w http.ResponseWriter, r *http.Request) {
}
w.Header().Set("Content-Security-Policy", Config.fileContentSecurityPolicy)
w.Header().Set("Referrer-Policy", Config.fileReferrerPolicy)
fileBackend.ServeFile(fileName, w, r)
w.Header().Set("Content-Type", metadata.Mimetype)
w.Header().Set("Content-Length", strconv.FormatInt(metadata.Size, 10))
w.Header().Set("Etag", fmt.Sprintf("\"%s\"", metadata.Sha256sum))
w.Header().Set("Cache-Control", "public, no-cache")
modtime := time.Unix(0, 0)
if done := httputil.CheckPreconditions(w, r, modtime); done == true {
return
}
if r.Method != "HEAD" {
_, reader, err := storageBackend.Get(fileName)
if err != nil {
oopsHandler(c, w, r, RespAUTO, "Unable to open file.")
return
}
defer reader.Close()
if _, err = io.CopyN(w, reader, metadata.Size); err != nil {
oopsHandler(c, w, r, RespAUTO, err.Error())
}
}
}
func staticHandler(c web.C, w http.ResponseWriter, r *http.Request) {
@ -53,29 +81,24 @@ func staticHandler(c web.C, w http.ResponseWriter, r *http.Request) {
return
}
w.Header().Set("Etag", timeStartedStr)
w.Header().Set("Cache-Control", "max-age=86400")
w.Header().Set("Etag", fmt.Sprintf("\"%s\"", timeStartedStr))
w.Header().Set("Cache-Control", "public, max-age=86400")
http.ServeContent(w, r, filePath, timeStarted, file)
return
}
}
func checkFile(filename string) error {
_, err := fileBackend.Exists(filename)
func checkFile(filename string) (metadata backends.Metadata, err error) {
metadata, err = storageBackend.Head(filename)
if err != nil {
return NotFoundErr
}
expired, err := isFileExpired(filename)
if err != nil {
return err
return
}
if expired {
fileBackend.Delete(filename)
metaStorageBackend.Delete(filename)
return NotFoundErr
if expiry.IsTsExpired(metadata.Expiry) {
storageBackend.Delete(filename)
err = backends.NotFoundErr
return
}
return nil
return
}

70
helpers/archive.go

@ -0,0 +1,70 @@
package helpers
import (
"archive/tar"
"archive/zip"
"compress/bzip2"
"compress/gzip"
"io"
"sort"
)
type ReadSeekerAt interface {
io.Reader
io.Seeker
io.ReaderAt
}
func ListArchiveFiles(mimetype string, size int64, r ReadSeekerAt) (files []string, err error) {
if mimetype == "application/x-tar" {
tReadr := tar.NewReader(r)
for {
hdr, err := tReadr.Next()
if err == io.EOF || err != nil {
break
}
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg {
files = append(files, hdr.Name)
}
}
sort.Strings(files)
} else if mimetype == "application/x-gzip" {
gzf, err := gzip.NewReader(r)
if err == nil {
tReadr := tar.NewReader(gzf)
for {
hdr, err := tReadr.Next()
if err == io.EOF || err != nil {
break
}
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg {
files = append(files, hdr.Name)
}
}
sort.Strings(files)
}
} else if mimetype == "application/x-bzip" {
bzf := bzip2.NewReader(r)
tReadr := tar.NewReader(bzf)
for {
hdr, err := tReadr.Next()
if err == io.EOF || err != nil {
break
}
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg {
files = append(files, hdr.Name)
}
}
sort.Strings(files)
} else if mimetype == "application/zip" {
zf, err := zip.NewReader(r, size)
if err == nil {
for _, f := range zf.File {
files = append(files, f.Name)
}
}
sort.Strings(files)
}
return
}
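
ListArchiveFiles only needs a ReadSeekerAt, so an *os.File can be passed directly (as the localfs Put above does). A quick hypothetical example listing the contents of a local tarball; the filename and mimetype are assumptions:

package main

import (
    "fmt"
    "log"
    "os"

    "github.com/andreimarcu/linx-server/helpers"
)

func main() {
    f, err := os.Open("example.tar.gz") // hypothetical archive
    if err != nil {
        log.Fatal(err)
    }
    defer f.Close()

    fi, err := f.Stat()
    if err != nil {
        log.Fatal(err)
    }

    // The size argument is only consulted for zip archives; the mimetype
    // selects which decoder is used.
    files, err := helpers.ListArchiveFiles("application/x-gzip", fi.Size(), f)
    if err != nil {
        log.Fatal(err)
    }
    for _, name := range files {
        fmt.Println(name)
    }
}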

83
helpers/helpers.go

@ -0,0 +1,83 @@
package helpers
import (
"bytes"
"encoding/hex"
"io"
"unicode"
"github.com/andreimarcu/linx-server/backends"
"github.com/minio/sha256-simd"
"gopkg.in/h2non/filetype.v1"
)
func GenerateMetadata(r io.Reader) (m backends.Metadata, err error) {
// Since we don't have the ability to seek within a file, we can use a
// Buffer in combination with a TeeReader to keep a copy of the bytes
// we read when detecting the file type. These bytes are still needed
// to hash the file and determine its size and cannot be discarded.
var buf bytes.Buffer
teeReader := io.TeeReader(r, &buf)
// Get first 512 bytes for mimetype detection
header := make([]byte, 512)
_, err = teeReader.Read(header)
if err != nil {
return
}
// Create a Hash and a MultiReader that includes the Buffer we created
// above along with the original Reader, which will have the rest of
// the file.
hasher := sha256.New()
multiReader := io.MultiReader(&buf, r)
// Copy everything into the Hash, then use the number of bytes written
// as the file size.
var readLen int64
readLen, err = io.Copy(hasher, multiReader)
if err != nil {
return
} else {
m.Size += readLen
}
// Get the hex-encoded string version of the Hash checksum
m.Sha256sum = hex.EncodeToString(hasher.Sum(nil))
// Use the bytes we extracted earlier and attempt to determine the file
// type
kind, err := filetype.Match(header)
if err != nil {
m.Mimetype = "application/octet-stream"
return m, err
} else if kind.MIME.Value != "" {
m.Mimetype = kind.MIME.Value
} else if printable(header) {
m.Mimetype = "text/plain"
} else {
m.Mimetype = "application/octet-stream"
}
return
}
func printable(data []byte) bool {
for i, b := range data {
r := rune(b)
// A null terminator that's not at the beginning of the file
if r == 0 && i == 0 {
return false
} else if r == 0 && i < 0 {
continue
}
if r > unicode.MaxASCII {
return false
}
}
return true
}

29
helpers/helpers_test.go

@ -0,0 +1,29 @@
package helpers
import (
"strings"
"testing"
)
func TestGenerateMetadata(t *testing.T) {
r := strings.NewReader("This is my test content")
m, err := GenerateMetadata(r)
if err != nil {
t.Fatal(err)
}
expectedSha256sum := "966152d20a77e739716a625373ee15af16e8f4aec631a329a27da41c204b0171"
if m.Sha256sum != expectedSha256sum {
t.Fatalf("Sha256sum was %q instead of expected value of %q", m.Sha256sum, expectedSha256sum)
}
expectedMimetype := "text/plain"
if m.Mimetype != expectedMimetype {
t.Fatalf("Mimetype was %q instead of expected value of %q", m.Mimetype, expectedMimetype)
}
expectedSize := int64(23)
if m.Size != expectedSize {
t.Fatalf("Size was %d instead of expected value of %d", m.Size, expectedSize)
}
}

27
httputil/LICENSE

@ -0,0 +1,27 @@
Copyright (c) 2009 The Go Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

218
httputil/conditional.go

@ -0,0 +1,218 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// HTTP file system request handler
package httputil
import (
"net/http"
"net/textproto"
"strings"
"time"
)
// scanETag determines if a syntactically valid ETag is present at s. If so,
// the ETag and remaining text after consuming ETag is returned. Otherwise,
// it returns "", "".
func scanETag(s string) (etag string, remain string) {
s = textproto.TrimString(s)
start := 0
if strings.HasPrefix(s, "W/") {
start = 2
}
if len(s[start:]) < 2 || s[start] != '"' {
return "", ""
}
// ETag is either W/"text" or "text".
// See RFC 7232 2.3.
for i := start + 1; i < len(s); i++ {
c := s[i]
switch {
// Character values allowed in ETags.
case c == 0x21 || c >= 0x23 && c <= 0x7E || c >= 0x80:
case c == '"':
return s[:i+1], s[i+1:]
default:
return "", ""
}
}
return "", ""
}
// etagStrongMatch reports whether a and b match using strong ETag comparison.
// Assumes a and b are valid ETags.
func etagStrongMatch(a, b string) bool {
return a == b && a != "" && a[0] == '"'
}
// etagWeakMatch reports whether a and b match using weak ETag comparison.
// Assumes a and b are valid ETags.
func etagWeakMatch(a, b string) bool {
return strings.TrimPrefix(a, "W/") == strings.TrimPrefix(b, "W/")
}
// condResult is the result of an HTTP request precondition check.
// See https://tools.ietf.org/html/rfc7232 section 3.
type condResult int
const (
condNone condResult = iota
condTrue
condFalse
)
func checkIfMatch(w http.ResponseWriter, r *http.Request) condResult {
im := r.Header.Get("If-Match")
if im == "" {
return condNone
}
for {
im = textproto.TrimString(im)
if len(im) == 0 {
break
}
if im[0] == ',' {
im = im[1:]
continue
}
if im[0] == '*' {
return condTrue
}
etag, remain := scanETag(im)
if etag == "" {
break
}
if etagStrongMatch(etag, w.Header().Get("Etag")) {
return condTrue
}
im = remain
}
return condFalse
}
func checkIfUnmodifiedSince(r *http.Request, modtime time.Time) condResult {
ius := r.Header.Get("If-Unmodified-Since")
if ius == "" || isZeroTime(modtime) {
return condNone
}
if t, err := http.ParseTime(ius); err == nil {
// The Date-Modified header truncates sub-second precision, so
// use mtime < t+1s instead of mtime <= t to check for unmodified.
if modtime.Before(t.Add(1 * time.Second)) {
return condTrue
}
return condFalse
}
return condNone
}
func checkIfNoneMatch(w http.ResponseWriter, r *http.Request) condResult {
inm := r.Header.Get("If-None-Match")
if inm == "" {
return condNone
}
buf := inm
for {
buf = textproto.TrimString(buf)
if len(buf) == 0 {
break
}
if buf[0] == ',' {
buf = buf[1:]
}
if buf[0] == '*' {
return condFalse
}
etag, remain := scanETag(buf)
if etag == "" {
break
}
if etagWeakMatch(etag, w.Header().Get("Etag")) {
return condFalse
}
buf = remain
}
return condTrue
}
func checkIfModifiedSince(r *http.Request, modtime time.Time) condResult {
if r.Method != "GET" && r.Method != "HEAD" {
return condNone
}
ims := r.Header.Get("If-Modified-Since")
if ims == "" || isZeroTime(modtime) {
return condNone
}
t, err := http.ParseTime(ims)
if err != nil {
return condNone
}
// The Date-Modified header truncates sub-second precision, so
// use mtime < t+1s instead of mtime <= t to check for unmodified.
if modtime.Before(t.Add(1 * time.Second)) {
return condFalse
}
return condTrue
}
var unixEpochTime = time.Unix(0, 0)
// isZeroTime reports whether t is obviously unspecified (either zero or Unix()=0).
func isZeroTime(t time.Time) bool {
return t.IsZero() || t.Equal(unixEpochTime)
}
func setLastModified(w http.ResponseWriter, modtime time.Time) {
if !isZeroTime(modtime) {
w.Header().Set("Last-Modified", modtime.UTC().Format(http.TimeFormat))
}
}
func writeNotModified(w http.ResponseWriter) {
// RFC 7232 section 4.1:
// a sender SHOULD NOT generate representation metadata other than the
// above listed fields unless said metadata exists for the purpose of
// guiding cache updates (e.g., Last-Modified might be useful if the
// response does not have an ETag field).
h := w.Header()
delete(h, "Content-Type")
delete(h, "Content-Length")
if h.Get("Etag") != "" {
delete(h, "Last-Modified")
}
w.WriteHeader(http.StatusNotModified)
}
// CheckPreconditions evaluates request preconditions and reports whether a precondition
// resulted in sending StatusNotModified or StatusPreconditionFailed.
func CheckPreconditions(w http.ResponseWriter, r *http.Request, modtime time.Time) (done bool) {
// This function carefully follows RFC 7232 section 6.
ch := checkIfMatch(w, r)
if ch == condNone {
ch = checkIfUnmodifiedSince(r, modtime)
}
if ch == condFalse {
w.WriteHeader(http.StatusPreconditionFailed)
return true
}
switch checkIfNoneMatch(w, r) {
case condFalse:
if r.Method == "GET" || r.Method == "HEAD" {
writeNotModified(w)
return true
} else {
w.WriteHeader(http.StatusPreconditionFailed)
return true
}
case condNone:
if checkIfModifiedSince(r, modtime) == condFalse {
writeNotModified(w)
return true
}
}
return false
}
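
Note that the match helpers compare request headers against the Etag already set on the ResponseWriter, so a caller sets Etag (and any Last-Modified) before invoking CheckPreconditions, exactly as fileserve.go does above. A minimal hypothetical handler, not part of the diff:

package main

import (
    "fmt"
    "io"
    "net/http"
    "strings"
    "time"

    "github.com/andreimarcu/linx-server/httputil"
)

func serveBlob(w http.ResponseWriter, r *http.Request, etag string, body string) {
    // Headers first: CheckPreconditions reads the Etag from w.Header().
    w.Header().Set("Etag", fmt.Sprintf("\"%s\"", etag))
    w.Header().Set("Cache-Control", "public, no-cache")

    // A zero modtime disables the If-Modified-Since / If-Unmodified-Since checks.
    if httputil.CheckPreconditions(w, r, time.Unix(0, 0)) {
        return // a 304 or 412 has already been written
    }
    if r.Method != "HEAD" {
        io.Copy(w, strings.NewReader(body))
    }
}

func main() {
    http.HandleFunc("/blob", func(w http.ResponseWriter, r *http.Request) {
        serveBlob(w, r, "deadbeef", "hello")
    })
    http.ListenAndServe("127.0.0.1:8080", nil)
}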

10
linx-cleanup/cleanup.go

@ -5,7 +5,6 @@ import (
"log"
"github.com/andreimarcu/linx-server/backends/localfs"
"github.com/andreimarcu/linx-server/backends/metajson"
"github.com/andreimarcu/linx-server/expiry"
)
@ -22,17 +21,15 @@ func main() {
"don't log deleted files")
flag.Parse()
metaStorageBackend := localfs.NewLocalfsBackend(metaDir)
metaBackend := metajson.NewMetaJSONBackend(metaStorageBackend)
fileBackend := localfs.NewLocalfsBackend(filesDir)
fileBackend := localfs.NewLocalfsBackend(metaDir, filesDir)
files, err := metaStorageBackend.List()
files, err := fileBackend.List()
if err != nil {
panic(err)
}
for _, filename := range files {
metadata, err := metaBackend.Get(filename)
metadata, err := fileBackend.Head(filename)
if err != nil {
if !noLogs {
log.Printf("Failed to find metadata for %s", filename)
@ -44,7 +41,6 @@ func main() {
log.Printf("Delete %s", filename)
}
fileBackend.Delete(filename)
metaStorageBackend.Delete(filename)
}
}
}

165
meta.go

@ -1,165 +0,0 @@
package main
import (
"archive/tar"
"archive/zip"
"compress/bzip2"
"compress/gzip"
"crypto/sha256"
"encoding/hex"
"errors"
"io"
"sort"
"time"
"unicode"
"github.com/andreimarcu/linx-server/backends"
"github.com/andreimarcu/linx-server/expiry"
"github.com/dchest/uniuri"
"gopkg.in/h2non/filetype.v1"
)
var NotFoundErr = errors.New("File not found.")
func generateMetadata(fName string, exp time.Time, delKey string) (m backends.Metadata, err error) {
file, err := fileBackend.Open(fName)
if err != nil {
return
}
defer file.Close()
m.Size, err = fileBackend.Size(fName)
if err != nil {
return
}
m.Expiry = exp
if delKey == "" {
m.DeleteKey = uniuri.NewLen(30)
} else {
m.DeleteKey = delKey
}
// Get first 512 bytes for mimetype detection
header := make([]byte, 512)
file.Read(header)
kind, err := filetype.Match(header)
if err != nil {
m.Mimetype = "application/octet-stream"
} else {
m.Mimetype = kind.MIME.Value
}
if m.Mimetype == "" {
// Check if the file seems anything like text
if printable(header) {
m.Mimetype = "text/plain"
} else {
m.Mimetype = "application/octet-stream"
}
}
// Compute the sha256sum
hasher := sha256.New()
file.Seek(0, 0)
_, err = io.Copy(hasher, file)
if err == nil {
m.Sha256sum = hex.EncodeToString(hasher.Sum(nil))
}
file.Seek(0, 0)
// If archive, grab list of filenames
if m.Mimetype == "application/x-tar" {
tReadr := tar.NewReader(file)
for {
hdr, err := tReadr.Next()
if err == io.EOF || err != nil {
break
}
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg {
m.ArchiveFiles = append(m.ArchiveFiles, hdr.Name)
}
}
sort.Strings(m.ArchiveFiles)
} else if m.Mimetype == "application/x-gzip" {
gzf, err := gzip.NewReader(file)
if err == nil {
tReadr := tar.NewReader(gzf)
for {
hdr, err := tReadr.Next()
if err == io.EOF || err != nil {
break
}
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg {
m.ArchiveFiles = append(m.ArchiveFiles, hdr.Name)
}
}
sort.Strings(m.ArchiveFiles)
}
} else if m.Mimetype == "application/x-bzip" {
bzf := bzip2.NewReader(file)
tReadr := tar.NewReader(bzf)
for {
hdr, err := tReadr.Next()
if err == io.EOF || err != nil {
break
}
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg {
m.ArchiveFiles = append(m.ArchiveFiles, hdr.Name)
}
}
sort.Strings(m.ArchiveFiles)
} else if m.Mimetype == "application/zip" {
zf, err := zip.NewReader(file, m.Size)
if err == nil {
for _, f := range zf.File {
m.ArchiveFiles = append(m.ArchiveFiles, f.Name)
}
}
sort.Strings(m.ArchiveFiles)
}
return
}
func metadataWrite(filename string, metadata *backends.Metadata) error {
return metaBackend.Put(filename, metadata)
}
func metadataRead(filename string) (metadata backends.Metadata, err error) {
metadata, err = metaBackend.Get(filename)
if err != nil {
// Metadata does not exist, generate one
newMData, err := generateMetadata(filename, expiry.NeverExpire, "")
if err != nil {
return metadata, err
}
metadataWrite(filename, &newMData)
metadata, err = metaBackend.Get(filename)
}
return
}
func printable(data []byte) bool {
for i, b := range data {
r := rune(b)
// A null terminator that's not at the beginning of the file
if r == 0 && i == 0 {
return false
} else if r == 0 && i < 0 {
continue
}
if r > unicode.MaxASCII {
return false
}
}
return true
}

46
pages.go

@ -21,8 +21,9 @@ const (
func indexHandler(c web.C, w http.ResponseWriter, r *http.Request) {
err := renderTemplate(Templates["index.html"], pongo2.Context{
"maxsize": Config.maxSize,
"expirylist": listExpirationTimes(),
"maxsize": Config.maxSize,
"expirylist": listExpirationTimes(),
"forcerandom": Config.forceRandomFilename,
}, r, w)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
@ -31,7 +32,8 @@ func indexHandler(c web.C, w http.ResponseWriter, r *http.Request) {
func pasteHandler(c web.C, w http.ResponseWriter, r *http.Request) {
err := renderTemplate(Templates["paste.html"], pongo2.Context{
"expirylist": listExpirationTimes(),
"expirylist": listExpirationTimes(),
"forcerandom": Config.forceRandomFilename,
}, r, w)
if err != nil {
oopsHandler(c, w, r, RespHTML, "")
@ -40,7 +42,8 @@ func pasteHandler(c web.C, w http.ResponseWriter, r *http.Request) {
func apiDocHandler(c web.C, w http.ResponseWriter, r *http.Request) {
err := renderTemplate(Templates["API.html"], pongo2.Context{
"siteurl": getSiteURL(r),
"siteurl": getSiteURL(r),
"forcerandom": Config.forceRandomFilename,
}, r, w)
if err != nil {
oopsHandler(c, w, r, RespHTML, "")
@ -64,12 +67,10 @@ func oopsHandler(c web.C, w http.ResponseWriter, r *http.Request, rt RespType, m
w.WriteHeader(500)
renderTemplate(Templates["oops.html"], pongo2.Context{"msg": msg}, r, w)
return
} else if rt == RespPLAIN {
w.WriteHeader(500)
fmt.Fprintf(w, "%s", msg)
return
} else if rt == RespJSON {
js, _ := json.Marshal(map[string]string{
"error": msg,
@ -79,7 +80,6 @@ func oopsHandler(c web.C, w http.ResponseWriter, r *http.Request, rt RespType, m
w.WriteHeader(500)
w.Write(js)
return
} else if rt == RespAUTO {
if strings.EqualFold("application/json", r.Header.Get("Accept")) {
oopsHandler(c, w, r, RespJSON, msg)
@ -89,11 +89,33 @@ func oopsHandler(c web.C, w http.ResponseWriter, r *http.Request, rt RespType, m
}
}
func badRequestHandler(c web.C, w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusBadRequest)
err := renderTemplate(Templates["400.html"], pongo2.Context{}, r, w)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
func badRequestHandler(c web.C, w http.ResponseWriter, r *http.Request, rt RespType, msg string) {
if rt == RespHTML {
w.WriteHeader(http.StatusBadRequest)
err := renderTemplate(Templates["400.html"], pongo2.Context{"msg": msg}, r, w)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
}
return
} else if rt == RespPLAIN {
w.WriteHeader(http.StatusBadRequest)
fmt.Fprintf(w, "%s", msg)
return
} else if rt == RespJSON {
js, _ := json.Marshal(map[string]string{
"error": msg,
})
w.Header().Set("Content-Type", "application/json; charset=UTF-8")
w.WriteHeader(http.StatusBadRequest)
w.Write(js)
return
} else if rt == RespAUTO {
if strings.EqualFold("application/json", r.Header.Get("Accept")) {
badRequestHandler(c, w, r, RespJSON, msg)
} else {
badRequestHandler(c, w, r, RespHTML, msg)
}
}
}

68
server.go

@ -16,7 +16,7 @@ import (
"github.com/GeertJohan/go.rice"
"github.com/andreimarcu/linx-server/backends"
"github.com/andreimarcu/linx-server/backends/localfs"
"github.com/andreimarcu/linx-server/backends/metajson"
"github.com/andreimarcu/linx-server/backends/s3"
"github.com/flosch/pongo2"
"github.com/vharitonsky/iniflags"
"github.com/zenazn/goji/graceful"
@ -42,10 +42,13 @@ var Config struct {
siteName string
siteURL string
sitePath string
selifPath string
certFile string
keyFile string
contentSecurityPolicy string
fileContentSecurityPolicy string
referrerPolicy string
fileReferrerPolicy string
xFrameOptions string
maxSize int64
maxExpiry uint64
@ -57,7 +60,12 @@ var Config struct {
authFile string
remoteAuthFile string
addHeaders headerList
googleShorterAPIKey string
noDirectAgents bool
s3Endpoint string
s3Region string
s3Bucket string
s3ForcePathStyle bool
forceRandomFilename bool
}
var Templates = make(map[string]*pongo2.Template)
@ -67,8 +75,7 @@ var timeStarted time.Time
var timeStartedStr string
var remoteAuthKeys []string
var metaStorageBackend backends.MetaStorageBackend
var metaBackend backends.MetaBackend
var fileBackend backends.StorageBackend
var storageBackend backends.StorageBackend
func setup() *web.Mux {
mux := web.New()
@ -87,8 +94,9 @@ func setup() *web.Mux {
mux.Use(middleware.Recoverer)
mux.Use(middleware.AutomaticOptions)
mux.Use(ContentSecurityPolicy(CSPOptions{
policy: Config.contentSecurityPolicy,
frame: Config.xFrameOptions,
policy: Config.contentSecurityPolicy,
referrerPolicy: Config.referrerPolicy,
frame: Config.xFrameOptions,
}))
mux.Use(AddHeaders(Config.addHeaders))
@ -126,9 +134,16 @@ func setup() *web.Mux {
Config.sitePath = "/"
}
metaStorageBackend = localfs.NewLocalfsBackend(Config.metaDir)
metaBackend = metajson.NewMetaJSONBackend(metaStorageBackend)
fileBackend = localfs.NewLocalfsBackend(Config.filesDir)
Config.selifPath = strings.TrimLeft(Config.selifPath, "/")
if lastChar := Config.selifPath[len(Config.selifPath)-1:]; lastChar != "/" {
Config.selifPath = Config.selifPath + "/"
}
if Config.s3Bucket != "" {
storageBackend = s3.NewS3Backend(Config.s3Bucket, Config.s3Region, Config.s3Endpoint, Config.s3ForcePathStyle)
} else {
storageBackend = localfs.NewLocalfsBackend(Config.metaDir, Config.filesDir)
}
// Template setup
p2l, err := NewPongo2TemplatesLoader()
@ -147,10 +162,9 @@ func setup() *web.Mux {
// Routing setup
nameRe := regexp.MustCompile("^" + Config.sitePath + `(?P<name>[a-z0-9-\.]+)$`)
selifRe := regexp.MustCompile("^" + Config.sitePath + `selif/(?P<name>[a-z0-9-\.]+)$`)
selifIndexRe := regexp.MustCompile("^" + Config.sitePath + `selif/$`)
selifRe := regexp.MustCompile("^" + Config.sitePath + Config.selifPath + `(?P<name>[a-z0-9-\.]+)$`)
selifIndexRe := regexp.MustCompile("^" + Config.sitePath + Config.selifPath + `$`)
torrentRe := regexp.MustCompile("^" + Config.sitePath + `(?P<name>[a-z0-9-\.]+)/torrent$`)
shortRe := regexp.MustCompile("^" + Config.sitePath + `(?P<name>[a-z0-9-\.]+)/short$`)
if Config.authFile == "" {
mux.Get(Config.sitePath, indexHandler)
@ -189,10 +203,6 @@ func setup() *web.Mux {
mux.Get(selifIndexRe, unauthorizedHandler)
mux.Get(torrentRe, fileTorrentHandler)
if Config.googleShorterAPIKey != "" {
mux.Get(shortRe, shortURLHandler)
}
mux.NotFound(notFoundHandler)
return mux
@ -213,6 +223,8 @@ func main() {
"name of the site")
flag.StringVar(&Config.siteURL, "siteurl", "",
"site base url (including trailing slash)")
flag.StringVar(&Config.selifPath, "selifpath", "selif",
"path relative to site base url where files are accessed directly")
flag.Int64Var(&Config.maxSize, "maxsize", 4*1024*1024*1024,
"maximum upload file size in bytes (default 4GB)")
flag.Uint64Var(&Config.maxExpiry, "maxexpiry", 0,
@ -232,17 +244,33 @@ func main() {
flag.StringVar(&Config.remoteAuthFile, "remoteauthfile", "",
"path to a file containing newline-separated scrypted auth keys for remote uploads")
flag.StringVar(&Config.contentSecurityPolicy, "contentsecuritypolicy",
"default-src 'self'; img-src 'self' data:; style-src 'self' 'unsafe-inline'; frame-ancestors 'self'; referrer origin;",
"default-src 'self'; img-src 'self' data:; style-src 'self' 'unsafe-inline'; frame-ancestors 'self';",
"value of default Content-Security-Policy header")
flag.StringVar(&Config.fileContentSecurityPolicy, "filecontentsecuritypolicy",
"default-src 'none'; img-src 'self'; object-src 'self'; media-src 'self'; style-src 'self' 'unsafe-inline'; frame-ancestors 'self'; referrer origin;",
"default-src 'none'; img-src 'self'; object-src 'self'; media-src 'self'; style-src 'self' 'unsafe-inline'; frame-ancestors 'self';",
"value of Content-Security-Policy header for file access")
flag.StringVar(&Config.referrerPolicy, "referrerpolicy",
"same-origin",
"value of default Referrer-Policy header")
flag.StringVar(&Config.fileReferrerPolicy, "filereferrerpolicy",
"same-origin",
"value of Referrer-Policy header for file access")
flag.StringVar(&Config.xFrameOptions, "xframeoptions", "SAMEORIGIN",
"value of X-Frame-Options header")
flag.Var(&Config.addHeaders, "addheader",
"Add an arbitrary header to the response. This option can be used multiple times.")
flag.StringVar(&Config.googleShorterAPIKey, "googleapikey", "",
"API Key for Google's URL Shortener.")
flag.BoolVar(&Config.noDirectAgents, "nodirectagents", false,
"disable serving files directly for wget/curl user agents")
flag.StringVar(&Config.s3Endpoint, "s3-endpoint", "",
"S3 endpoint")
flag.StringVar(&Config.s3Region, "s3-region", "",
"S3 region")
flag.StringVar(&Config.s3Bucket, "s3-bucket", "",
"S3 bucket to use for files and metadata")
flag.BoolVar(&Config.s3ForcePathStyle, "s3-force-path-style", false,
"Force path-style addressing for S3 (e.g. https://s3.amazonaws.com/linx/example.txt)")
flag.BoolVar(&Config.forceRandomFilename, "force-random-filename", false,
"Force all uploads to use a random filename")
iniflags.Parse()
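
The storage rework above collapses the separate meta/file backends into a single storageBackend, chosen in setup(): S3 when -s3-bucket is set, the local filesystem otherwise. Below is a minimal sketch of that selection pattern; the Store interface here is a simplified stand-in for backends.StorageBackend, not the package's real definition (the actual interface and constructors live in backends/, backends/s3 and backends/localfs).

package main

import (
	"fmt"
	"io"
	"strings"
)

// Store is a hypothetical, simplified stand-in for backends.StorageBackend.
type Store interface {
	Put(key string, r io.Reader) (int64, error)
}

type localStore struct{ dir string }

func (l localStore) Put(key string, r io.Reader) (int64, error) {
	// A real backend would write to a file under l.dir; here we only count bytes.
	return io.Copy(io.Discard, r)
}

type s3Store struct{ bucket, region, endpoint string }

func (s s3Store) Put(key string, r io.Reader) (int64, error) {
	// A real backend would upload the object to s.bucket; here we only count bytes.
	return io.Copy(io.Discard, r)
}

// pickBackend mirrors the selection in setup(): use S3 when a bucket is
// configured, otherwise fall back to the local filesystem.
func pickBackend(s3Bucket, s3Region, s3Endpoint, filesDir string) Store {
	if s3Bucket != "" {
		return s3Store{bucket: s3Bucket, region: s3Region, endpoint: s3Endpoint}
	}
	return localStore{dir: filesDir}
}

func main() {
	backend := pickBackend("", "", "", "files/")
	n, _ := backend.Put("example.txt", strings.NewReader("hello"))
	fmt.Println("stored", n, "bytes")
}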

206
server_test.go

@ -173,7 +173,7 @@ func TestFileNotFound(t *testing.T) {
filename := generateBarename()
req, err := http.NewRequest("GET", "/selif/"+filename, nil)
req, err := http.NewRequest("GET", "/"+Config.selifPath+filename, nil)
if err != nil {
t.Fatal(err)
}
@ -486,7 +486,6 @@ func TestPostJSONUploadMaxExpiry(t *testing.T) {
var myjson RespOkJSON
err = json.Unmarshal([]byte(w.Body.String()), &myjson)
if err != nil {
fmt.Println(w.Body.String())
t.Fatal(err)
}
@ -643,14 +642,45 @@ func TestPostEmptyUpload(t *testing.T) {
mux.ServeHTTP(w, req)
if w.Code != 500 {
if w.Code != 400 {
t.Log(w.Body.String())
t.Fatalf("Status code is not 500, but %d", w.Code)
t.Fatalf("Status code is not 400, but %d", w.Code)
}
}
func TestPostTooLargeUpload(t *testing.T) {
mux := setup()
oldMaxSize := Config.maxSize
Config.maxSize = 2
w := httptest.NewRecorder()
filename := generateBarename() + ".txt"
var b bytes.Buffer
mw := multipart.NewWriter(&b)
fw, err := mw.CreateFormFile("file", filename)
if err != nil {
t.Fatal(err)
}
fw.Write([]byte("test content"))
mw.Close()
req, err := http.NewRequest("POST", "/upload/", &b)
req.Header.Set("Content-Type", mw.FormDataContentType())
req.Header.Set("Referer", Config.siteURL)
if err != nil {
t.Fatal(err)
}
if !strings.Contains(w.Body.String(), "Empty file") {
t.Fatal("Response did not contain 'Empty file'")
mux.ServeHTTP(w, req)
if w.Code != 400 {
t.Log(w.Body.String())
t.Fatalf("Status code is not 400, but %d", w.Code)
}
Config.maxSize = oldMaxSize
}
func TestPostEmptyJSONUpload(t *testing.T) {
@ -679,9 +709,9 @@ func TestPostEmptyJSONUpload(t *testing.T) {
mux.ServeHTTP(w, req)
if w.Code != 500 {
if w.Code != 400 {
t.Log(w.Body.String())
t.Fatalf("Status code is not 500, but %d", w.Code)
t.Fatalf("Status code is not 400, but %d", w.Code)
}
var myjson RespErrJSON
@ -690,7 +720,7 @@ func TestPostEmptyJSONUpload(t *testing.T) {
t.Fatal(err)
}
if myjson.Error != "Could not upload file: Empty file" {
if myjson.Error != "Empty file" {
t.Fatal("Json 'error' was not 'Empty file' but " + myjson.Error)
}
}
@ -733,6 +763,32 @@ func TestPutRandomizedUpload(t *testing.T) {
}
}
func TestPutForceRandomUpload(t *testing.T) {
mux := setup()
w := httptest.NewRecorder()
oldFRF := Config.forceRandomFilename
Config.forceRandomFilename = true
filename := "randomizeme.file"
req, err := http.NewRequest("PUT", "/upload/"+filename, strings.NewReader("File content"))
if err != nil {
t.Fatal(err)
}
// while this should also work without this header, let's try to force
// the randomized filename off to be sure
req.Header.Set("Linx-Randomize", "no")
mux.ServeHTTP(w, req)
if w.Body.String() == Config.siteURL+filename {
t.Fatal("Filename was not random")
}
Config.forceRandomFilename = oldFRF
}
func TestPutNoExtensionUpload(t *testing.T) {
mux := setup()
w := httptest.NewRecorder()
@ -768,11 +824,41 @@ func TestPutEmptyUpload(t *testing.T) {
mux.ServeHTTP(w, req)
if !strings.Contains(w.Body.String(), "Empty file") {
t.Fatal("Response doesn't contain'Empty file'")
if w.Code != 400 {
t.Fatalf("Status code is not 400, but %d", w.Code)
}
}
func TestPutTooLargeUpload(t *testing.T) {
mux := setup()
oldMaxSize := Config.maxSize
Config.maxSize = 2
w := httptest.NewRecorder()
filename := generateBarename() + ".file"
req, err := http.NewRequest("PUT", "/upload/"+filename, strings.NewReader("File too big"))
if err != nil {
t.Fatal(err)
}
req.Header.Set("Linx-Randomize", "yes")
mux.ServeHTTP(w, req)
if w.Code != 500 {
t.Log(w.Body.String())
t.Fatalf("Status code is not 500, but %d", w.Code)
}
if !strings.Contains(w.Body.String(), "request body too large") {
t.Fatal("Response did not contain 'request body too large'")
}
Config.maxSize = oldMaxSize
}
func TestPutJSONUpload(t *testing.T) {
var myjson RespOkJSON
@ -941,7 +1027,7 @@ func TestPutAndOverwrite(t *testing.T) {
// Make sure it's the new file
w = httptest.NewRecorder()
req, err = http.NewRequest("GET", "/selif/"+myjson.Filename, nil)
req, err = http.NewRequest("GET", "/"+Config.selifPath+myjson.Filename, nil)
mux.ServeHTTP(w, req)
if w.Code == 404 {
@ -953,6 +1039,55 @@ func TestPutAndOverwrite(t *testing.T) {
}
}
func TestPutAndOverwriteForceRandom(t *testing.T) {
var myjson RespOkJSON
mux := setup()
w := httptest.NewRecorder()
oldFRF := Config.forceRandomFilename
Config.forceRandomFilename = true
req, err := http.NewRequest("PUT", "/upload", strings.NewReader("File content"))
if err != nil {
t.Fatal(err)
}
req.Header.Set("Accept", "application/json")
mux.ServeHTTP(w, req)
err = json.Unmarshal([]byte(w.Body.String()), &myjson)
if err != nil {
t.Fatal(err)
}
// Overwrite it
w = httptest.NewRecorder()
req, err = http.NewRequest("PUT", "/upload/"+myjson.Filename, strings.NewReader("New file content"))
req.Header.Set("Linx-Delete-Key", myjson.Delete_Key)
mux.ServeHTTP(w, req)
if w.Code != 200 {
t.Fatal("Status code was not 200, but " + strconv.Itoa(w.Code))
}
// Make sure it's the new file
w = httptest.NewRecorder()
req, err = http.NewRequest("GET", "/"+Config.selifPath+myjson.Filename, nil)
mux.ServeHTTP(w, req)
if w.Code == 404 {
t.Fatal("Status code was 404")
}
if w.Body.String() != "New file content" {
t.Fatal("File did not contain 'New file content")
}
Config.forceRandomFilename = oldFRF
}
func TestPutAndSpecificDelete(t *testing.T) {
var myjson RespOkJSON
@ -1121,3 +1256,50 @@ func TestShutdown(t *testing.T) {
os.RemoveAll(Config.filesDir)
os.RemoveAll(Config.metaDir)
}
func TestPutAndGetCLI(t *testing.T) {
var myjson RespOkJSON
mux := setup()
// upload file
w := httptest.NewRecorder()
req, err := http.NewRequest("PUT", "/upload", strings.NewReader("File content"))
if err != nil {
t.Fatal(err)
}
req.Header.Set("Accept", "application/json")
mux.ServeHTTP(w, req)
err = json.Unmarshal([]byte(w.Body.String()), &myjson)
if err != nil {
t.Fatal(err)
}
// request file without wget user agent
w = httptest.NewRecorder()
req, err = http.NewRequest("GET", myjson.Url, nil)
if err != nil {
t.Fatal(err)
}
mux.ServeHTTP(w, req)
contentType := w.Header().Get("Content-Type")
if strings.HasPrefix(contentType, "text/plain") {
t.Fatalf("Didn't receive file display page but %s", contentType)
}
// request file with wget user agent
w = httptest.NewRecorder()
req, err = http.NewRequest("GET", myjson.Url, nil)
req.Header.Set("User-Agent", "wget")
if err != nil {
t.Fatal(err)
}
mux.ServeHTTP(w, req)
contentType = w.Header().Get("Content-Type")
if !strings.HasPrefix(contentType, "text/plain") {
t.Fatalf("Didn't receive file directly but %s", contentType)
}
}
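
TestPutAndGetCLI pins down the direct-serve contract: the same display URL returns the HTML page for browsers but the raw file as text/plain for wget/curl-style user agents, unless -nodirectagents is set. The handler below is only an illustration of that contract built around a hypothetical serve function; the project's actual logic lives in display.go and fileserve.go.

package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
	"strings"
)

// serve is a hypothetical handler sketching the behavior the test asserts.
func serve(w http.ResponseWriter, r *http.Request) {
	ua := strings.ToLower(r.Header.Get("User-Agent"))
	if strings.HasPrefix(ua, "wget") || strings.HasPrefix(ua, "curl") {
		// CLI agents get the file contents directly.
		w.Header().Set("Content-Type", "text/plain; charset=utf-8")
		fmt.Fprint(w, "File content")
		return
	}
	// Browsers get the display page instead.
	w.Header().Set("Content-Type", "text/html; charset=utf-8")
	fmt.Fprint(w, "<html>display page</html>")
}

func main() {
	for _, ua := range []string{"", "wget"} {
		req := httptest.NewRequest("GET", "/somefile.txt", nil)
		if ua != "" {
			req.Header.Set("User-Agent", ua)
		}
		w := httptest.NewRecorder()
		serve(w, req)
		fmt.Printf("ua=%q -> %s\n", ua, w.Header().Get("Content-Type"))
	}
}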

89
shorturl.go

@ -1,89 +0,0 @@
package main
import (
"bytes"
"encoding/json"
"errors"
"net/http"
"github.com/zenazn/goji/web"
)
type shortenerRequest struct {
LongURL string `json:"longUrl"`
}
type shortenerResponse struct {
Kind string `json:"kind"`
ID string `json:"id"`
LongURL string `json:"longUrl"`
Error struct {
Code int `json:"code"`
Message string `json:"message"`
} `json:"error"`
}
func shortURLHandler(c web.C, w http.ResponseWriter, r *http.Request) {
fileName := c.URLParams["name"]
err := checkFile(fileName)
if err == NotFoundErr {
notFoundHandler(c, w, r)
return
}
metadata, err := metadataRead(fileName)
if err != nil {
oopsHandler(c, w, r, RespJSON, "Corrupt metadata.")
return
}
if metadata.ShortURL == "" {
url, err := shortenURL(getSiteURL(r) + fileName)
if err != nil {
oopsHandler(c, w, r, RespJSON, err.Error())
return
}
metadata.ShortURL = url
err = metadataWrite(fileName, &metadata)
if err != nil {
oopsHandler(c, w, r, RespJSON, "Corrupt metadata.")
return
}
}
js, _ := json.Marshal(map[string]string{
"shortUrl": metadata.ShortURL,
})
w.Write(js)
return
}
func shortenURL(url string) (string, error) {
apiURL := "https://www.googleapis.com/urlshortener/v1/url?key=" + Config.googleShorterAPIKey
jsonStr, _ := json.Marshal(shortenerRequest{LongURL: url})
req, err := http.NewRequest("POST", apiURL, bytes.NewBuffer(jsonStr))
req.Header.Set("Content-Type", "application/json")
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
return "", err
}
defer resp.Body.Close()
shortenerResponse := new(shortenerResponse)
err = json.NewDecoder(resp.Body).Decode(shortenerResponse)
if err != nil {
return "", err
}
if shortenerResponse.Error.Message != "" {
return "", errors.New(shortenerResponse.Error.Message)
}
return shortenerResponse.ID, nil
}

10
static/css/dropzone.css

@ -31,17 +31,25 @@
border: 2px solid #FAFBFC;
}
#dropzone { width: 400px;
#dropzone {
width: 400px;
margin-left: auto;
margin-right: auto;
}
@media(max-width: 450px) {
#dropzone {
width: auto;
}
}
#uploads {
margin-top: 20px;
}
div.dz-default {
border: 2px dashed #C9C9C9;
border-radius: 5px;
color: #C9C9C9;
font: 14px "helvetica neue",helvetica,arial,sans-serif;
background-color: #FAFBFC;

3
static/css/github-markdown.css

@ -8,7 +8,8 @@
font-size: 12px;
line-height: 1.6;
word-wrap: break-word;
width: 680px;
width: 80vw;
max-width: 680px;
padding: 10px;
}

211
static/css/linx.css

@ -1,56 +1,56 @@
body {
background-color: #E8ECF0;
color: #556A7F;
background-color: #E8ECF0;
color: #556A7F;
font-family: Arial, Helvetica, sans-serif;
font-size: 14px;
font-family: Arial, Helvetica, sans-serif;
font-size: 14px;
}
#container_container {
display: table;
table-layout: fixed;
margin-left: auto;
margin-right: auto;
display: table;
table-layout: fixed;
margin-left: auto;
margin-right: auto;
}
#container {
display: table-cell;
min-width: 200px;
display: table-cell;
min-width: 200px;
}
#header a {
text-decoration: none;
color: #556A7F;
text-decoration: none;
color: #556A7F;
}
#navigation {
margin-top: 4px;
margin-top: 4px;
}
#navigation a {
text-decoration: none;
border-bottom: 1px dotted #556A7F;
color: #556A7F;
text-decoration: none;
border-bottom: 1px dotted #556A7F;
color: #556A7F;
}
#navigation a:hover {
background-color: #C7D1EB;
background-color: #C7D1EB;
}
#main {
background-color: white;
background-color: white;
padding: 6px 5px 8px 5px;
padding: 6px 5px 8px 5px;
-moz-box-shadow: 1px 1px 1px 1px #ccc;
-webkit-box-shadow: 1px 1px 1px 1px #ccc;
box-shadow: 1px 1px 1px 1px #ccc;
-moz-box-shadow: 1px 1px 1px 1px #ccc;
-webkit-box-shadow: 1px 1px 1px 1px #ccc;
box-shadow: 1px 1px 1px 1px #ccc;
text-align: center;
text-align: center;
}
#main a {
color: #556A7F;
color: #556A7F;
}
#normal-content {
@ -62,28 +62,29 @@ body {
margin-bottom: 0;
}
.ninfo {
margin-bottom: 5px;
}
.dinfo {
-moz-box-shadow: 1px 1px 1px 1px #ccc;
-webkit-box-shadow: 1px 1px 1px 1px #ccc;
box-shadow: 1px 1px 1px 1px #ccc;
margin-bottom: 15px;
}
#info {
text-align: left;
background-color: white;
padding: 5px 5px 5px 5px;
padding: 5px;
}
#info #filename,
#editform #filename {
width: 232px;
.info-flex {
display: flex;
flex-wrap: wrap;
align-items: baseline;
justify-content: space-between;
}
.info-actions {
margin-left: 15px;
font-size: 13px;
text-align: right;
}
#info #extension,
@ -91,15 +92,6 @@ body {
width: 40px;
}
#info .float-left {
margin-top: 2px;
margin-right: 20px;
}
#info .right {
font-size: 13px;
}
#info a {
text-decoration: none;
color: #556A7F;
@ -110,88 +102,97 @@ body {
background-color: #E8ECF0;
}
#info input[type=text] {
border: 0;
color: #556A7F;
#info input[type=checkbox] {
margin: 0;
vertical-align: bottom;
}
#footer {
color: gray;
text-align: right;
margin-top: 30px;
margin-bottom: 10px;
font-size: 11px;
color: gray;
text-align: right;
margin-top: 30px;
margin-bottom: 10px;
font-size: 11px;
}
#footer a {
color: gray;
text-decoration: none;
color: gray;
text-decoration: none;
}
.normal {
text-align: left;
font-size: 13px;
text-align: left;
font-size: 13px;
}
.normal a {
text-decoration: none;
border-bottom: 1px dotted gray;
text-decoration: none;
border-bottom: 1px dotted gray;
}
.normal a:hover {
color: black;
background-color: #E8ECF0;
color: black;
background-color: #E8ECF0;
}
.normal ul {
padding-left: 15px;
padding-left: 15px;
}
.normal li {
margin-bottom: 3px;
list-style: none;
margin-bottom: 3px;
list-style: none;
}
.normal li a {
font-weight: bold;
font-weight: bold;
}
.fixed {
width: 800px;
width: 80vw;
max-width: 800px;
}
.paste {
width: 70vw;
max-width: 700px;
}
.needs-border {
border-top: 1px solid rgb(214, 214, 214);
border-top: 1px solid rgb(214, 214, 214);
}
.left {
text-align: left;
text-align: left;
}
.float-left {
float: left;
float: left;
}
.pad-left {
padding-left: 10px;
}
.pad-right {
padding-right: 10px;
padding-right: 10px;
}
.text-right {
text-align: right;
text-align: right;
}
.center {
text-align: center;
text-align: center;
}
.float-right, .right {
float: right;
float: right;
}
.clear {
clear: both;
clear: both;
}
#upload_header {
@ -245,19 +246,24 @@ body {
}
#choices {
float: left;
width: 100%;
text-align: left;
vertical-align: bottom;
margin-top: 5px;
font-size:13px;
display: flex;
align-items: center;
flex-wrap: wrap;
justify-content: space-between;
width: 100%;
margin-top: 5px;
font-size: 13px;
}
#expiry {
float: right;
padding-top: 1px;
}
#randomize {
vertical-align: bottom;
margin: 0;
}
.oopscontent {
width: 400px;
}
@ -267,13 +273,35 @@ body {
border: 0;
}
.error-404 img {
max-width: 90vw;
}
.padme {
padding-left: 5px;
padding-right: 5px;
}
.editor {
width: 705px;
height: 450px;
border-color: #cccccc;
font-family: monospace;
resize: none;
overflow: auto;
width: 100%;
height: 450px;
border: 1px solid #eaeaea;
font-family: monospace;
resize: none;
overflow: auto;
border-radius: 2px;
padding: 2px;
box-sizing: border-box;
-webkit-box-sizing: border-box;
-moz-box-sizing: border-box;
}
#info input[type=text] {
border: 1px solid #eaeaea;
color: #556A7F;
padding: 2px 4px;
font-family: Arial, Helvetica, sans-serif;
}
.storygreen {
@ -287,7 +315,7 @@ body {
/* Content display {{{ */
.display-audio,
.display-file {
width: 500px;
width: 100%;
}
.display-image {
@ -315,16 +343,17 @@ body {
#editform,
#editform .editor {
display: none;
width: 100%
}
#codeb {
white-space: pre-wrap;
}
#editor {
#inplace-editor {
display: none;
width: 794px;
width: 100%;
height: 800px;
font-size: 13px;
}
/* }}} */
/* }}} */

116
static/js/bin.js

@ -1,58 +1,58 @@
// @license magnet:?xt=urn:btih:1f739d935676111cfff4b4693e3816e664797050&dn=gpl-3.0.txt GPL-v3-or-Later
var navlist = document.getElementById("info").getElementsByClassName("right")[0];
init();
function init() {
var editA = document.createElement('a');
editA.setAttribute("href", "#");
editA.addEventListener('click', function(ev) {
edit(ev);
return false;
});
editA.innerHTML = "edit";
var separator = document.createTextNode(" | ");
navlist.insertBefore(editA, navlist.firstChild);
navlist.insertBefore(separator, navlist.children[1]);
document.getElementById('save').addEventListener('click', paste);
document.getElementById('wordwrap').addEventListener('click', wrap);
}
function edit(ev) {
ev.preventDefault();
navlist.remove();
document.getElementById("filename").remove();
document.getElementById("editform").style.display = "block";
var normalcontent = document.getElementById("normal-content");
normalcontent.removeChild(document.getElementById("normal-code"));
var editordiv = document.getElementById("editor");
editordiv.style.display = "block";
editordiv.addEventListener('keydown', handleTab);
}
function paste(ev) {
var editordiv = document.getElementById("editor");
document.getElementById("newcontent").value = editordiv.value;
document.forms["reply"].submit();
}
function wrap(ev) {
if (document.getElementById("wordwrap").checked) {
document.getElementById("codeb").style.wordWrap = "break-word";
document.getElementById("codeb").style.whiteSpace = "pre-wrap";
}
else {
document.getElementById("codeb").style.wordWrap = "normal";
document.getElementById("codeb").style.whiteSpace = "pre";
}
}
// @license-end
// @license magnet:?xt=urn:btih:1f739d935676111cfff4b4693e3816e664797050&dn=gpl-3.0.txt GPL-v3-or-Later
var navlist = document.getElementById("info").getElementsByClassName("info-actions")[0];
init();
function init() {
var editA = document.createElement('a');
editA.setAttribute("href", "#");
editA.addEventListener('click', function(ev) {
edit(ev);
return false;
});
editA.innerHTML = "edit";
var separator = document.createTextNode(" | ");
navlist.insertBefore(editA, navlist.firstChild);
navlist.insertBefore(separator, navlist.children[1]);
document.getElementById('save').addEventListener('click', paste);
document.getElementById('wordwrap').addEventListener('click', wrap);
}
function edit(ev) {
ev.preventDefault();
navlist.remove();
document.getElementById("filename").remove();
document.getElementById("editform").style.display = "block";
var normalcontent = document.getElementById("normal-content");
normalcontent.removeChild(document.getElementById("normal-code"));
var editordiv = document.getElementById("inplace-editor");
editordiv.style.display = "block";
editordiv.addEventListener('keydown', handleTab);
}
function paste(ev) {
var editordiv = document.getElementById("inplace-editor");
document.getElementById("newcontent").value = editordiv.value;
document.forms["reply"].submit();
}
function wrap(ev) {
if (document.getElementById("wordwrap").checked) {
document.getElementById("codeb").style.wordWrap = "break-word";
document.getElementById("codeb").style.whiteSpace = "pre-wrap";
}
else {
document.getElementById("codeb").style.wordWrap = "normal";
document.getElementById("codeb").style.whiteSpace = "pre";
}
}
// @license-end

39
static/js/shorturl.js

@ -1,39 +0,0 @@
document.getElementById('shorturl').addEventListener('click', function (e) {
e.preventDefault();
if (e.target.href !== "") return;
xhr = new XMLHttpRequest();
xhr.open("GET", e.target.dataset.url, true);
xhr.setRequestHeader('Accept', 'application/json');
xhr.onreadystatechange = function () {
if (xhr.readyState === 4) {
var resp = JSON.parse(xhr.responseText);
if (xhr.status === 200 && resp.error == null) {
e.target.innerText = resp.shortUrl;
e.target.href = resp.shortUrl;
e.target.setAttribute('aria-label', 'Click to copy into clipboard')
} else {
e.target.setAttribute('aria-label', resp.error)
}
}
};
xhr.send();
});
var clipboard = new Clipboard("#shorturl", {
text: function (trigger) {
if (trigger.href == null) return;
return trigger.href;
}
});
clipboard.on('success', function (e) {
e.trigger.setAttribute('aria-label', 'Successfully copied')
});
clipboard.on('error', function (e) {
e.trigger.setAttribute('aria-label', 'Your browser does not support coping to clipboard')
});

171
static/js/upload.js

@ -1,51 +1,54 @@
// @license magnet:?xt=urn:btih:1f739d935676111cfff4b4693e3816e664797050&dn=gpl-3.0.txt GPL-v3-or-Later
Dropzone.options.dropzone = {
init: function() {
var dzone = document.getElementById("dzone");
dzone.style.display = "block";
},
addedfile: function(file) {
var upload = document.createElement("div");
upload.className = "upload";
init: function() {
var dzone = document.getElementById("dzone");
dzone.style.display = "block";
},
addedfile: function(file) {
var upload = document.createElement("div");
upload.className = "upload";
var fileLabel = document.createElement("span");
fileLabel.innerHTML = file.name;
file.fileLabel = fileLabel;
upload.appendChild(fileLabel);
var fileLabel = document.createElement("span");
fileLabel.innerHTML = file.name;
file.fileLabel = fileLabel;
upload.appendChild(fileLabel);
var fileActions = document.createElement("div");
fileActions.className = "right";
file.fileActions = fileActions;
upload.appendChild(fileActions);
var fileActions = document.createElement("div");
fileActions.className = "right";
file.fileActions = fileActions;
upload.appendChild(fileActions);
var cancelAction = document.createElement("span");
cancelAction.className = "cancel";
cancelAction.innerHTML = "Cancel";
cancelAction.addEventListener('click', function(ev) {
this.removeFile(file);
}.bind(this));
file.cancelActionElement = cancelAction;
fileActions.appendChild(cancelAction);
var cancelAction = document.createElement("span");
cancelAction.className = "cancel";
cancelAction.innerHTML = "Cancel";
cancelAction.addEventListener('click', function(ev) {
this.removeFile(file);
}.bind(this));
file.cancelActionElement = cancelAction;
fileActions.appendChild(cancelAction);
var progress = document.createElement("span");
file.progressElement = progress;
fileActions.appendChild(progress);
var progress = document.createElement("span");
file.progressElement = progress;
fileActions.appendChild(progress);
file.uploadElement = upload;
file.uploadElement = upload;
document.getElementById("uploads").appendChild(upload);
},
uploadprogress: function(file, p, bytesSent) {
p = parseInt(p);
file.progressElement.innerHTML = p + "%";
file.uploadElement.setAttribute("style", 'background-image: -webkit-linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%); background-image: -moz-linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%); background-image: -ms-linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%); background-image: -o-linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%); background-image: linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%)');
},
sending: function(file, xhr, formData) {
formData.append("randomize", document.getElementById("randomize").checked);
formData.append("expires", document.getElementById("expires").value);
},
success: function(file, resp) {
document.getElementById("uploads").appendChild(upload);
},
uploadprogress: function(file, p, bytesSent) {
p = parseInt(p);
file.progressElement.innerHTML = p + "%";
file.uploadElement.setAttribute("style", 'background-image: -webkit-linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%); background-image: -moz-linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%); background-image: -ms-linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%); background-image: -o-linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%); background-image: linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%)');
},
sending: function(file, xhr, formData) {
var randomize = document.getElementById("randomize");
if(randomize != null) {
formData.append("randomize", randomize.checked);
}
formData.append("expires", document.getElementById("expires").value);
},
success: function(file, resp) {
file.fileActions.removeChild(file.progressElement);
var fileLabelLink = document.createElement("a");
@ -59,51 +62,61 @@ Dropzone.options.dropzone = {
var deleteAction = document.createElement("span");
deleteAction.innerHTML = "Delete";
deleteAction.className = "cancel";
deleteAction.addEventListener('click', function(ev) {
xhr = new XMLHttpRequest();
xhr.open("DELETE", resp.url, true);
xhr.setRequestHeader("Linx-Delete-Key", resp.delete_key);
xhr.onreadystatechange = function(file) {
if (xhr.readyState == 4 && xhr.status === 200) {
var text = document.createTextNode("Deleted ");
file.fileLabel.insertBefore(text, file.fileLabelLink);
file.fileLabel.className = "deleted";
file.fileActions.removeChild(file.cancelActionElement);
}
}.bind(this, file);
xhr.send();
});
file.fileActions.removeChild(file.cancelActionElement);
file.cancelActionElement = deleteAction;
file.fileActions.appendChild(deleteAction);
},
error: function(file, resp, xhrO) {
deleteAction.addEventListener('click', function(ev) {
xhr = new XMLHttpRequest();
xhr.open("DELETE", resp.url, true);
xhr.setRequestHeader("Linx-Delete-Key", resp.delete_key);
xhr.onreadystatechange = function(file) {
if (xhr.readyState == 4 && xhr.status === 200) {
var text = document.createTextNode("Deleted ");
file.fileLabel.insertBefore(text, file.fileLabelLink);
file.fileLabel.className = "deleted";
file.fileActions.removeChild(file.cancelActionElement);
}
}.bind(this, file);
xhr.send();
});
file.fileActions.removeChild(file.cancelActionElement);
file.cancelActionElement = deleteAction;
file.fileActions.appendChild(deleteAction);
},
error: function(file, resp, xhrO) {
file.fileActions.removeChild(file.cancelActionElement);
file.fileActions.removeChild(file.progressElement);
if (file.status === "canceled") {
file.fileLabel.innerHTML = file.name + ": Canceled ";
}
else {
if (resp.error) {
file.fileLabel.innerHTML = file.name + ": " + resp.error;
}
else if (resp.includes("<html")) {
file.fileLabel.innerHTML = file.name + ": Server Error";
}
else {
file.fileLabel.innerHTML = file.name + ": " + resp;
}
}
file.fileLabel.className = "error";
},
if (file.status === "canceled") {
file.fileLabel.innerHTML = file.name + ": Canceled ";
}
else {
if (resp.error) {
file.fileLabel.innerHTML = file.name + ": " + resp.error;
}
else if (resp.includes("<html")) {
file.fileLabel.innerHTML = file.name + ": Server Error";
}
else {
file.fileLabel.innerHTML = file.name + ": " + resp;
}
}
file.fileLabel.className = "error";
},
maxFilesize: Math.round(parseInt(document.getElementById("dropzone").getAttribute("data-maxsize"), 10) / 1024 / 1024),
previewsContainer: "#uploads",
parallelUploads: 5,
headers: {"Accept": "application/json"},
dictDefaultMessage: "Click or Drop file(s)",
dictFallbackMessage: ""
previewsContainer: "#uploads",
parallelUploads: 5,
headers: {"Accept": "application/json"},
dictDefaultMessage: "Click or Drop file(s) or Paste image",
dictFallbackMessage: ""
};
document.onpaste = function(event) {
var items = (event.clipboardData || event.originalEvent.clipboardData).items;
for (index in items) {
var item = items[index];
if (item.kind === "file") {
Dropzone.forElement("#dropzone").addFile(item.getAsFile());
}
}
};
// @end-license

1
templates.go

@ -83,6 +83,7 @@ func renderTemplate(tpl *pongo2.Template, context pongo2.Context, r *http.Reques
}
context["sitepath"] = Config.sitePath
context["selifpath"] = Config.selifPath
context["using_auth"] = Config.authFile != ""
return tpl.ExecuteWriter(context, writer)

4
templates/404.html

@ -1,5 +1,7 @@
{% extends "base.html" %}
{% block content %}
<a href="{{ sitepath }}"><img src='{{ sitepath }}static/images/404.jpg'></a>
<div class="error-404">
<a href="{{ sitepath }}"><img src='{{ sitepath }}static/images/404.jpg'></a>
</div>
{% endblock %}

16
templates/API.html

@ -25,8 +25,10 @@
<p><strong>Optional headers with the request</strong></p>
{% if not forcerandom %}
<p>Randomize the filename<br/>
<code>Linx-Randomize: yes</code></p>
{% endif %}
<p>Specify a custom deletion key<br/>
<code>Linx-Delete-Key: mysecret</code></p>
@ -41,6 +43,7 @@
<blockquote>
<p>“url”: the publicly available upload url<br/>
“direct_url”: the url to access the file directly<br/>
“filename”: the (optionally generated) filename<br/>
“delete_key”: the (optionally generated) deletion key,<br/>
“expiry”: the unix timestamp at which the file will expire (0 if never)<br/>
@ -55,30 +58,30 @@
{% if using_auth %}
<pre><code>$ curl -H &#34;Linx-Api-Key: mysecretkey&#34; -T myphoto.jpg {{ siteurl }}upload/
{{ siteurl }}myphoto.jpg</code></pre>
{{ siteurl }}{% if not forcerandom %}myphoto.jpg{% else %}7z4h4ut.jpg{% endif %}</code></pre>
{% else %}
<pre><code>$ curl -T myphoto.jpg {{ siteurl }}upload/
{{ siteurl }}myphoto.jpg</code></pre>
{{ siteurl }}{% if not forcerandom %}myphoto.jpg{% else %}wtq7pan.jpg{% endif %}</code></pre>
{% endif %}
<p>Uploading myphoto.jpg with an expiry of 20 minutes</p>
{% if using_auth %}
<pre><code>$ curl -H &#34;Linx-Api-Key: mysecretkey&#34; -H &#34;Linx-Expiry: 1200&#34; -T myphoto.jpg {{ siteurl }}upload/
{{ siteurl }}myphoto.jpg</code></pre>
{{ siteurl }}{% if not forcerandom %}myphoto.jpg{% else %}jm295snf.jpg{% endif %}</code></pre>
{% else %}
<pre><code>$ curl -H &#34;Linx-Expiry: 1200&#34; -T myphoto.jpg {{ siteurl }}upload/
{{ siteurl }}myphoto.jpg</code></pre>
{{ siteurl }}{% if not forcerandom %}myphoto.jpg{% else %}1doym9u2.jpg{% endif %}</code></pre>
{% endif %}
<p>Uploading myphoto.jpg with a random filename and getting a json response:</p>
{% if using_auth %}
<pre><code>$ curl -H &#34;Linx-Api-Key: mysecretkey&#34; -H &#34;Accept: application/json&#34; -H &#34;Linx-Randomize: yes&#34; -T myphoto.jpg {{ siteurl }}upload/
<pre><code>$ curl -H &#34;Linx-Api-Key: mysecretkey&#34; -H &#34;Accept: application/json&#34;{% if not forcerandom %} -H &#34;Linx-Randomize: yes&#34;{% endif %} -T myphoto.jpg {{ siteurl }}upload/
{&#34;delete_key&#34;:&#34;...&#34;,&#34;expiry&#34;:&#34;0&#34;,&#34;filename&#34;:&#34;f34h4iu.jpg&#34;,&#34;mimetype&#34;:&#34;image/jpeg&#34;,
&#34;sha256sum&#34;:&#34;...&#34;,&#34;size&#34;:&#34;...&#34;,&#34;url&#34;:&#34;{{ siteurl }}f34h4iu.jpg&#34;}</code></pre>
{% else %}
<pre><code>$ curl -H &#34;Accept: application/json&#34; -H &#34;Linx-Randomize: yes&#34; -T myphoto.jpg {{ siteurl }}upload/
<pre><code>$ curl -H &#34;Accept: application/json&#34;{% if not forcerandom %} -H &#34;Linx-Randomize: yes&#34;{% endif %} -T myphoto.jpg {{ siteurl }}upload/
{&#34;delete_key&#34;:&#34;...&#34;,&#34;expiry&#34;:&#34;0&#34;,&#34;filename&#34;:&#34;f34h4iu.jpg&#34;,&#34;mimetype&#34;:&#34;image/jpeg&#34;,
&#34;sha256sum&#34;:&#34;...&#34;,&#34;size&#34;:&#34;...&#34;,&#34;url&#34;:&#34;{{ siteurl }}f34h4iu.jpg&#34;}</code></pre>
{% endif %}
@ -121,6 +124,7 @@ DELETED</code></pre>
<blockquote>
<p>“url”: the publicly available upload url<br/>
“direct_url”: the url to access the file directly<br/>
“filename”: the (optionally generated) filename<br/>
“expiry”: the unix timestamp at which the file will expire (0 if never)<br/>
“size”: the size in bytes of the file<br/>

3
templates/base.html

@ -3,7 +3,8 @@
<head>
<title>{% block title %}{{ sitename }}{% endblock %}</title>
<meta charset='utf-8' content='text/html' http-equiv='content-type'>
<link href='{{ sitepath }}static/css/linx.css' media='screen, projection' rel='stylesheet' type='text/css'>
<meta name='viewport' content='width=device-width, initial-scale=1.0'>
<link href='{{ sitepath }}static/css/linx.css?v=1' media='screen, projection' rel='stylesheet' type='text/css'>
<link href='{{ sitepath }}static/css/hint.css' rel='stylesheet' type='text/css'>
<link href='{{ sitepath }}static/images/favicon.gif' rel='icon' type='image/gif'>
{% block head %}{% endblock %}

4
templates/display/audio.html

@ -2,8 +2,8 @@
{% block main %}
<audio class="display-audio" controls preload='auto'>
<source src='{{ sitepath }}selif/{{ filename }}'>
<a href='{{ sitepath }}selif/{{ filename }}'>Download it instead</a>
<source src='{{ sitepath }}{{ selifpath }}{{ filename }}'>
<a href='{{ sitepath }}{{ selifpath }}{{ filename }}'>Download it instead</a>
</audio>
{% endblock %}

22
templates/display/base.html

@ -6,32 +6,22 @@
{% block content %}
<div id="info" class="dinfo">
<div class="float-left" id="filename">
<div id="info" class="dinfo info-flex">
<div id="filename">
{{ filename }}
</div>
<div class="right">
<div class="info-actions">
{% if expiry %}
<span>file expires in {{ expiry }}</span> |
{% endif %}
{% block infomore %}{% endblock %}
<span>{{ size }}</span> |
{% if shorturlEnabled %}
{% if shorturl %}
<a class="hint--top" aria-label="Click to copy into clipboard" id="shorturl"
style="cursor: pointer;" href="{{shorturl}}">{{shorturl}}</a> |
{% else %}
<a class="hint--top" aria-label="Click to retrieve shortened url" id="shorturl"
data-url="{{ sitepath }}{{filename}}/short" style="cursor: pointer;">short url</a> |
{% endif %}
{% endif %}
<a href="{{ filename }}/torrent" download>torrent</a> |
<a href="{{ sitepath }}selif/{{ filename }}" download>get</a>
<a href="{{ sitepath }}{{ selifpath }}{{ filename }}" download>get</a>
</div>
{% block infoleft %}{% endblock %}
<div class="clear"></div>
</div>
<div id="main" {% block mainmore %}{% endblock %}>
@ -43,8 +33,4 @@
</div>
<script src="{{ sitepath }}static/js/clipboard.js"></script>
{% if shorturlEnabled %}
<script src="{{ sitepath }}static/js/shorturl.js"></script>
{% endif %}
{% endblock %}

37
templates/display/bin.html

@ -11,37 +11,34 @@
{% block infoleft %}
<div id="editform">
<form id="reply" action='{{ sitepath }}upload' method='post' >
<div class="right">
<select id="expiry" name="expires">
<option disabled=disabled>Expires:</option>
<option value="0">never</option>
<option value="60">a minute</option>
<option value="300">5 minutes</option>
<option value="3600">an hour</option>
<option value="86400">a day</option>
<option value="604800">a week</option>
<option value="2419200">a month</option>
<option value="29030400">a year</option>
</select>
<button id="save">save</button>
<form id="reply" action='{{ sitepath }}upload' method='post'>
<div class="info-flex">
<div>
{% if not forcerandom %}<input class="codebox" name='filename' id="filename" type='text' value="" placeholder="filename">{% endif %}.<input id="extension" class="codebox" name='extension' type='text' value="{{ extra.extension }}" placeholder="txt">
</div>
<div class="info-actions">
<select id="expiry" name="expires">
<option disabled>Expires:</option>
{% for expiry in expirylist %}
<option value="{{ expiry.Seconds }}"{% if forloop.Last %} selected{% endif %}>{{ expiry.Human }}</option>
{% endfor %}
</select>
<button type="submit" id="save">Save</button>
</div>
</div>
<input class="codebox" name='filename' id="filename" type='text' value="" placeholder="filename (empty for random filename)">.<input id="extension" class="codebox" name='extension' type='text' value="{{ extra.extension }}" placeholder="txt">
<textarea name='content' id="newcontent" class="editor"></textarea>
</form>
</div>
{% endblock %}
{%block infomore %}
{% block infomore %}
<label>wrap <input id="wordwrap" type="checkbox" checked></label> |
{% endblock %}
{% block main %}
<div id="normal-content" class="normal fixed">
<pre id="normal-code"><code id="codeb" class="{{ extra.lang_hl }}">{{ extra.contents }}</code></pre>
<textarea id="editor" class="editor">{{ extra.contents }}</textarea>
<textarea id="inplace-editor" class="editor">{{ extra.contents }}</textarea>
</div>
@ -51,5 +48,5 @@
{% endif %}
<script src="{{ sitepath }}static/js/util.js"></script>
<script src="{{ sitepath }}static/js/bin.js"></script>
<script src="{{ sitepath }}static/js/bin.js?v=1"></script>
{% endblock %}

2
templates/display/file.html

@ -2,7 +2,7 @@
{% block main %}
<div class="normal display-file">
<p class="center">You are requesting <a href="{{ sitepath }}selif/{{ filename }}">{{ filename }}</a>, <a href="{{ sitepath }}selif/{{ filename }}">click here</a> to download.</p>
<p class="center">You are requesting <a href="{{ sitepath }}{{ selifpath }}{{ filename }}">{{ filename }}</a>, <a href="{{ sitepath }}{{ selifpath }}{{ filename }}">click here</a> to download.</p>
{% if files|length > 0 %}
<p>Contents of the archive:</p>

4
templates/display/image.html

@ -1,7 +1,7 @@
{% extends "base.html" %}
{% block main %}
<a href="{{ sitepath }}selif/{{ filename }}">
<img class="display-image" src="{{ sitepath }}selif/{{ filename }}" />
<a href="{{ sitepath }}{{ selifpath }}{{ filename }}">
<img class="display-image" src="{{ sitepath }}{{ selifpath }}{{ filename }}" />
</a>
{% endblock %}

4
templates/display/pdf.html

@ -1,10 +1,10 @@
{% extends "base.html" %}
{% block main %}
<object class="display-pdf" data="{{ sitepath }}selif/{{ filename }}" type="application/pdf">
<object class="display-pdf" data="{{ sitepath }}{{ selifpath }}{{ filename }}" type="application/pdf">
<p>It appears your Web browser is not configured to display PDF files.
No worries, just <a href="{{ sitepath }}selif/{{ filename }}">click here to download the PDF file.</a></p>
No worries, just <a href="{{ sitepath }}{{ selifpath }}{{ filename }}">click here to download the PDF file.</a></p>
</object>
{% endblock %}

34
templates/display/story.html

@ -9,24 +9,22 @@
{% block infoleft %}
<div id="editform">
<form id="reply" action='{{ sitepath }}upload' method='post' >
<div class="right">
<select id="expiry" name="expires">
<option disabled=disabled>Expires:</option>
<option value="0">never</option>
<option value="60">a minute</option>
<option value="300">5 minutes</option>
<option value="3600">an hour</option>
<option value="86400">a day</option>
<option value="604800">a week</option>
<option value="2419200">a month</option>
<option value="29030400">a year</option>
</select>
<button id="save">save</button>
<form id="reply" action='{{ sitepath }}upload' method='post'>
<div class="info-flex">
<div>
{% if not forcerandom %}<input class="codebox" name='filename' id="filename" type='text' value="" placeholder="filename">{% endif %}.<input id="extension" class="codebox" name='extension' type='text' value="story" placeholder="txt">
</div>
<div class="info-actions">
<select id="expiry" name="expires">
<option disabled>Expires:</option>
{% for expiry in expirylist %}
<option value="{{ expiry.Seconds }}"{% if forloop.Last %} selected{% endif %}>{{ expiry.Human }}</option>
{% endfor %}
</select>
<button type="submit" id="save">Save</button>
</div>
</div>
<input class="codebox" name='filename' id="filename" type='text' value="" placeholder="filename (empty for random filename)">.<input id="extension" class="codebox" name='extension' type='text' value="story" placeholder="txt">
<textarea name='content' id="newcontent" class="editor"></textarea>
</form>
</div>
@ -39,10 +37,10 @@
{% block main %}
<div id="normal-content" class="normal">
<pre id="normal-code"><code id="codeb" class="story">{% for line in lines %}{% if line|make_list|first == ">" %}<span class="storygreen">{{ line }}</span>{% else %}<span class="storyred">{{ line }}</span>{% endif %}{% endfor %}</code></pre>
<textarea id="editor" class="editor">{{ extra.contents }}</textarea>
<textarea id="inplace-editor" class="editor">{{ extra.contents }}</textarea>
</div>
<script src="{{ sitepath }}static/js/util.js"></script>
<script src="{{ sitepath }}static/js/bin.js"></script>
<script src="{{ sitepath }}static/js/bin.js?v=1"></script>
{% endblock %}

4
templates/display/video.html

@ -2,7 +2,7 @@
{% block main %}
<video class="display-video" controls autoplay>
<source src="{{ sitepath }}selif/{{ filename }}"/>
<a href='{{ sitepath }}selif/{{ filename }}'>Download it instead</a>
<source src="{{ sitepath }}{{ selifpath }}{{ filename }}"/>
<a href='{{ sitepath }}{{ selifpath }}{{ filename }}'>Download it instead</a>
</video>
{% endblock %}

6
templates/index.html

@ -13,12 +13,13 @@
</div>
<div id="dzone" class="dz-default dz-message">
<span>Click or Drop file(s)</span>
<span>Click or Drop file(s) or Paste image</span>
</div>
<div id="choices">
<label>{% if not forcerandom %}<input name="randomize" id="randomize" type="checkbox" checked /> Randomize filename{% endif %}</label>
<div id="expiry">
<label>File expiry:
<label>File expiry:
<select name="expires" id="expires">
{% for expiry in expirylist %}
<option value="{{ expiry.Seconds }}"{% if forloop.Last %} selected{% endif %}>{{ expiry.Human }}</option>
@ -26,7 +27,6 @@
</select>
</label>
</div>
<label><input name="randomize" id="randomize" type="checkbox" checked /> Randomize filename</label>
</div>
<div class="clear"></div>
</form>

20
templates/paste.html

@ -2,24 +2,24 @@
{% block content %}
<form id="reply" action='{{ sitepath }}upload' method='post'>
<div id="main">
<div id="info" class="ninfo">
<input class="codebox" name='filename' id="filename" type='text' value="" placeholder="filename (empty for random filename)" />.<span class="hint--top hint--bounce" data-hint="Enable syntax highlighting by adding the extension"><input id="extension" class="codebox" name='extension' type='text' value="" placeholder="txt" /></span>
<div class="right">
<div id="main" class="paste">
<div id="info" class="info-flex">
<div>
{% if not forcerandom %}<span class="hint--top hint--bounce" data-hint="Leave empty for random filename"><input class="codebox" name='filename' id="filename" type='text' value="" placeholder="filename" /></span>{% endif %}.<span class="hint--top hint--bounce" data-hint="Enable syntax highlighting by adding the extension"><input id="extension" class="codebox" name='extension' type='text' value="" placeholder="txt" /></span>
</div>
<div>
<select id="expiry" name="expires">
<option disabled="disabled">Expires:</option>
<option disabled>Expires:</option>
{% for expiry in expirylist %}
<option value="{{ expiry.Seconds }}"{% if forloop.Last %} selected{% endif %}>{{ expiry.Human }}</option>
{% endfor %}
</select>
<input type="submit" value="Paste">
<button type="submit">Paste</button>
</div>
</div>
<div id="inner_content">
<textarea name='content' id="content" class="editor"></textarea>
<div id="inner_content" class="padme">
<textarea name='content' id="content" class="editor"></textarea>
</div>
</div>
</form>

64
torrent.go

@ -2,65 +2,44 @@ package main
import (
"bytes"
"crypto/sha1"
"fmt"
"io"
"net/http"
"time"
"github.com/andreimarcu/linx-server/backends"
"github.com/andreimarcu/linx-server/expiry"
"github.com/andreimarcu/linx-server/torrent"
"github.com/zeebo/bencode"
"github.com/zenazn/goji/web"
)
const (
TORRENT_PIECE_LENGTH = 262144
)
type TorrentInfo struct {
PieceLength int `bencode:"piece length"`
Pieces string `bencode:"pieces"`
Name string `bencode:"name"`
Length int `bencode:"length"`
}
type Torrent struct {
Encoding string `bencode:"encoding"`
Info TorrentInfo `bencode:"info"`
UrlList []string `bencode:"url-list"`
}
func hashPiece(piece []byte) []byte {
h := sha1.New()
h.Write(piece)
return h.Sum(nil)
}
func createTorrent(fileName string, f io.Reader, r *http.Request) ([]byte, error) {
url := getSiteURL(r) + Config.selifPath + fileName
chunk := make([]byte, torrent.TORRENT_PIECE_LENGTH)
func createTorrent(fileName string, f io.ReadCloser, r *http.Request) ([]byte, error) {
chunk := make([]byte, TORRENT_PIECE_LENGTH)
torrent := Torrent{
t := torrent.Torrent{
Encoding: "UTF-8",
Info: TorrentInfo{
PieceLength: TORRENT_PIECE_LENGTH,
Info: torrent.TorrentInfo{
PieceLength: torrent.TORRENT_PIECE_LENGTH,
Name: fileName,
},
UrlList: []string{fmt.Sprintf("%sselif/%s", getSiteURL(r), fileName)},
UrlList: []string{url},
}
for {
n, err := f.Read(chunk)
n, err := io.ReadFull(f, chunk)
if err == io.EOF {
break
} else if err != nil {
} else if err != nil && err != io.ErrUnexpectedEOF {
return []byte{}, err
}
torrent.Info.Length += n
torrent.Info.Pieces += string(hashPiece(chunk[:n]))
t.Info.Length += n
t.Info.Pieces += string(torrent.HashPiece(chunk[:n]))
}
data, err := bencode.EncodeBytes(&torrent)
data, err := bencode.EncodeBytes(&t)
if err != nil {
return []byte{}, err
}
@ -71,21 +50,24 @@ func createTorrent(fileName string, f io.ReadCloser, r *http.Request) ([]byte, e
func fileTorrentHandler(c web.C, w http.ResponseWriter, r *http.Request) {
fileName := c.URLParams["name"]
err := checkFile(fileName)
if err == NotFoundErr {
metadata, f, err := storageBackend.Get(fileName)
if err == backends.NotFoundErr {
notFoundHandler(c, w, r)
return
} else if err == backends.BadMetadata {
oopsHandler(c, w, r, RespAUTO, "Corrupt metadata.")
return
} else if err != nil {
oopsHandler(c, w, r, RespAUTO, err.Error())
return
}
defer f.Close()
f, err := fileBackend.Open(fileName)
if err != nil {
oopsHandler(c, w, r, RespHTML, "Could not create torrent.")
if expiry.IsTsExpired(metadata.Expiry) {
storageBackend.Delete(fileName)
notFoundHandler(c, w, r)
return
}
defer f.Close()
encoded, err := createTorrent(fileName, f, r)
if err != nil {

28
torrent/torrent.go

@ -0,0 +1,28 @@
package torrent
import (
"crypto/sha1"
)
const (
TORRENT_PIECE_LENGTH = 262144
)
type TorrentInfo struct {
PieceLength int `bencode:"piece length"`
Pieces string `bencode:"pieces"`
Name string `bencode:"name"`
Length int `bencode:"length"`
}
type Torrent struct {
Encoding string `bencode:"encoding"`
Info TorrentInfo `bencode:"info"`
UrlList []string `bencode:"url-list"`
}
func HashPiece(piece []byte) []byte {
h := sha1.New()
h.Write(piece)
return h.Sum(nil)
}
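
createTorrent now reads pieces with io.ReadFull instead of a bare Read: Read may legally return fewer than TORRENT_PIECE_LENGTH bytes mid-stream, which would shift piece boundaries and corrupt the piece hashes, while ReadFull only comes up short on the final piece (io.ErrUnexpectedEOF) or at a clean EOF. A self-contained sketch of that loop, with the SHA-1 piece hashing inlined:

package main

import (
	"crypto/sha1"
	"fmt"
	"io"
	"strings"
)

const pieceLen = 262144 // mirrors TORRENT_PIECE_LENGTH

// hashPieces hashes a stream in fixed-size pieces, as createTorrent does.
func hashPieces(r io.Reader) (int, []byte, error) {
	var length int
	var pieces []byte
	chunk := make([]byte, pieceLen)
	for {
		n, err := io.ReadFull(r, chunk)
		if err == io.EOF {
			break // nothing left to read
		} else if err != nil && err != io.ErrUnexpectedEOF {
			return 0, nil, err // genuine read error
		}
		sum := sha1.Sum(chunk[:n])
		pieces = append(pieces, sum[:]...)
		length += n
		if err == io.ErrUnexpectedEOF {
			break // short final piece
		}
	}
	return length, pieces, nil
}

func main() {
	n, pieces, _ := hashPieces(strings.NewReader("example payload"))
	fmt.Println(n, "bytes,", len(pieces)/sha1.Size, "piece hash(es)")
}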

7
torrent_test.go

@ -5,12 +5,13 @@ import (
"os"
"testing"
"github.com/andreimarcu/linx-server/torrent"
"github.com/zeebo/bencode"
)
func TestCreateTorrent(t *testing.T) {
fileName := "server.go"
var decoded Torrent
var decoded torrent.Torrent
f, err := os.Open("server.go")
if err != nil {
@ -45,14 +46,14 @@ func TestCreateTorrent(t *testing.T) {
t.Fatal("Length was less than or equal to 0, expected more")
}
tracker := fmt.Sprintf("%sselif/%s", Config.siteURL, fileName)
tracker := fmt.Sprintf("%s%s%s", Config.siteURL, Config.selifPath, fileName)
if decoded.UrlList[0] != tracker {
t.Fatalf("First entry in URL list was %s, expected %s", decoded.UrlList[0], tracker)
}
}
func TestCreateTorrentWithImage(t *testing.T) {
var decoded Torrent
var decoded torrent.Torrent
f, err := os.Open("static/images/404.jpg")
if err != nil {

125
upload.go

@ -22,6 +22,7 @@ import (
"gopkg.in/h2non/filetype.v1"
)
var FileTooLargeError = errors.New("File too large.")
var fileBlacklist = map[string]bool{
"favicon.ico": true,
"index.htm": true,
@ -34,10 +35,11 @@ var fileBlacklist = map[string]bool{
// Describes metadata directly from the user request
type UploadRequest struct {
src io.Reader
size int64
filename string
expiry time.Duration // Seconds until expiry, 0 = never
deleteKey string // Empty string if not defined
randomBarename bool
deletionKey string // Empty string if not defined
}
// Metadata associated with a file as it would actually be stored
@ -48,7 +50,7 @@ type Upload struct {
func uploadPostHandler(c web.C, w http.ResponseWriter, r *http.Request) {
if !strictReferrerCheck(r, getSiteURL(r), []string{"Linx-Delete-Key", "Linx-Expiry", "Linx-Randomize", "X-Requested-With"}) {
badRequestHandler(c, w, r)
badRequestHandler(c, w, r, RespAUTO, "")
return
}
@ -65,32 +67,39 @@ func uploadPostHandler(c web.C, w http.ResponseWriter, r *http.Request) {
}
defer file.Close()
r.ParseForm()
if r.Form.Get("randomize") == "true" {
upReq.randomBarename = true
}
upReq.expiry = parseExpiry(r.Form.Get("expires"))
upReq.src = file
upReq.size = headers.Size
upReq.filename = headers.Filename
} else {
if r.FormValue("content") == "" {
oopsHandler(c, w, r, RespHTML, "Empty file")
if r.PostFormValue("content") == "" {
badRequestHandler(c, w, r, RespAUTO, "Empty file")
return
}
extension := r.FormValue("extension")
extension := r.PostFormValue("extension")
if extension == "" {
extension = "txt"
}
upReq.src = strings.NewReader(r.FormValue("content"))
upReq.expiry = parseExpiry(r.FormValue("expires"))
upReq.filename = r.FormValue("filename") + "." + extension
content := r.PostFormValue("content")
upReq.src = strings.NewReader(content)
upReq.size = int64(len(content))
upReq.filename = r.PostFormValue("filename") + "." + extension
}
upReq.expiry = parseExpiry(r.PostFormValue("expires"))
if r.PostFormValue("randomize") == "true" {
upReq.randomBarename = true
}
upload, err := processUpload(upReq)
if strings.EqualFold("application/json", r.Header.Get("Accept")) {
if err != nil {
if err == FileTooLargeError || err == backends.FileEmptyError {
badRequestHandler(c, w, r, RespJSON, err.Error())
return
} else if err != nil {
oopsHandler(c, w, r, RespJSON, "Could not upload file: "+err.Error())
return
}
@ -99,14 +108,16 @@ func uploadPostHandler(c web.C, w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json; charset=UTF-8")
w.Write(js)
} else {
if err != nil {
if err == FileTooLargeError || err == backends.FileEmptyError {
badRequestHandler(c, w, r, RespHTML, err.Error())
return
} else if err != nil {
oopsHandler(c, w, r, RespHTML, "Could not upload file: "+err.Error())
return
}
http.Redirect(w, r, Config.sitePath+upload.Filename, 303)
}
}
func uploadPutHandler(c web.C, w http.ResponseWriter, r *http.Request) {
@ -115,12 +126,15 @@ func uploadPutHandler(c web.C, w http.ResponseWriter, r *http.Request) {
defer r.Body.Close()
upReq.filename = c.URLParams["name"]
upReq.src = r.Body
upReq.src = http.MaxBytesReader(w, r.Body, Config.maxSize)
upload, err := processUpload(upReq)
if strings.EqualFold("application/json", r.Header.Get("Accept")) {
if err != nil {
if err == FileTooLargeError || err == backends.FileEmptyError {
badRequestHandler(c, w, r, RespJSON, err.Error())
return
} else if err != nil {
oopsHandler(c, w, r, RespJSON, "Could not upload file: "+err.Error())
return
}
@ -129,7 +143,10 @@ func uploadPutHandler(c web.C, w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json; charset=UTF-8")
w.Write(js)
} else {
if err != nil {
if err == FileTooLargeError || err == backends.FileEmptyError {
badRequestHandler(c, w, r, RespPLAIN, err.Error())
return
} else if err != nil {
oopsHandler(c, w, r, RespPLAIN, "Could not upload file: "+err.Error())
return
}
@ -162,8 +179,8 @@ func uploadRemote(c web.C, w http.ResponseWriter, r *http.Request) {
}
upReq.filename = filepath.Base(grabUrl.Path)
upReq.src = resp.Body
upReq.deletionKey = r.FormValue("deletekey")
upReq.src = http.MaxBytesReader(w, resp.Body, Config.maxSize)
upReq.deleteKey = r.FormValue("deletekey")
upReq.randomBarename = r.FormValue("randomize") == "yes"
upReq.expiry = parseExpiry(r.FormValue("expiry"))
@ -193,20 +210,26 @@ func uploadHeaderProcess(r *http.Request, upReq *UploadRequest) {
upReq.randomBarename = true
}
upReq.deletionKey = r.Header.Get("Linx-Delete-Key")
upReq.deleteKey = r.Header.Get("Linx-Delete-Key")
// Get seconds until expiry. Non-integer responses never expire.
expStr := r.Header.Get("Linx-Expiry")
upReq.expiry = parseExpiry(expStr)
}
func processUpload(upReq UploadRequest) (upload Upload, err error) {
// Determine the appropriate filename, then write to disk
if upReq.size > Config.maxSize {
return upload, FileTooLargeError
}
// Determine the appropriate filename
barename, extension := barePlusExt(upReq.filename)
randomize := false
// Randomize the "barename" (filename without extension) if needed
if upReq.randomBarename || len(barename) == 0 {
barename = generateBarename()
randomize = true
}
var header []byte
@ -215,7 +238,7 @@ func processUpload(upReq UploadRequest) (upload Upload, err error) {
header = make([]byte, 512)
n, _ := upReq.src.Read(header)
if n == 0 {
return upload, errors.New("Empty file")
return upload, backends.FileEmptyError
}
header = header[:n]
@ -231,28 +254,44 @@ func processUpload(upReq UploadRequest) (upload Upload, err error) {
upload.Filename = strings.Join([]string{barename, extension}, ".")
upload.Filename = strings.Replace(upload.Filename, " ", "", -1)
fileexists, _ := fileBackend.Exists(upload.Filename)
fileexists, _ := storageBackend.Exists(upload.Filename)
// Check if the delete key matches, in which case overwrite
if fileexists {
metad, merr := metadataRead(upload.Filename)
metad, merr := storageBackend.Head(upload.Filename)
if merr == nil {
if upReq.deletionKey == metad.DeleteKey {
if upReq.deleteKey == metad.DeleteKey {
fileexists = false
} else if Config.forceRandomFilename == true {
// the file exists
// the delete key doesn't match
// force random filenames is enabled
randomize = true
}
}
} else if Config.forceRandomFilename == true {
// the file doesn't exist
// force random filenames is enabled
randomize = true
// set fileexists to true to generate a new barename
fileexists = true
}
for fileexists {
counter, err := strconv.Atoi(string(barename[len(barename)-1]))
if err != nil {
barename = barename + "1"
if randomize {
barename = generateBarename()
} else {
barename = barename[:len(barename)-1] + strconv.Itoa(counter+1)
counter, err := strconv.Atoi(string(barename[len(barename)-1]))
if err != nil {
barename = barename + "1"
} else {
barename = barename[:len(barename)-1] + strconv.Itoa(counter+1)
}
}
upload.Filename = strings.Join([]string{barename, extension}, ".")
fileexists, err = fileBackend.Exists(upload.Filename)
fileexists, err = storageBackend.Exists(upload.Filename)
}
if fileBlacklist[strings.ToLower(upload.Filename)] {
@ -267,24 +306,15 @@ func processUpload(upReq UploadRequest) (upload Upload, err error) {
fileExpiry = time.Now().Add(upReq.expiry)
}
bytes, err := fileBackend.Put(upload.Filename, io.MultiReader(bytes.NewReader(header), upReq.src))
if err != nil {
return upload, err
} else if bytes > Config.maxSize {
fileBackend.Delete(upload.Filename)
return upload, errors.New("File too large")
if upReq.deleteKey == "" {
upReq.deleteKey = uniuri.NewLen(30)
}
upload.Metadata, err = generateMetadata(upload.Filename, fileExpiry, upReq.deletionKey)
if err != nil {
fileBackend.Delete(upload.Filename)
return
}
err = metadataWrite(upload.Filename, &upload.Metadata)
upload.Metadata, err = storageBackend.Put(upload.Filename, io.MultiReader(bytes.NewReader(header), upReq.src), fileExpiry, upReq.deleteKey)
if err != nil {
fileBackend.Delete(upload.Filename)
return
return upload, err
}
return
}
@ -295,6 +325,7 @@ func generateBarename() string {
func generateJSONresponse(upload Upload, r *http.Request) []byte {
js, _ := json.Marshal(map[string]string{
"url": getSiteURL(r) + upload.Filename,
"direct_url": getSiteURL(r) + Config.selifPath + upload.Filename,
"filename": upload.Filename,
"delete_key": upload.Metadata.DeleteKey,
"expiry": strconv.FormatInt(upload.Metadata.Expiry.Unix(), 10),
