
Merge remote-tracking branch 'upstream/master'

pull/123/head
xtrafrancyz · 6 years ago · commit 3d2bbc61e4
51 changed files:

  1. .gitignore (2)
  2. .travis.yml (4)
  3. Dockerfile (23)
  4. README.md (42)
  5. backends/backends.go (23)
  6. backends/localfs/localfs.go (156)
  7. backends/meta.go (17)
  8. backends/s3/s3.go (195)
  9. backends/storage.go (24)
  10. build.sh (117)
  11. csp.go (11)
  12. csp_test.go (12)
  13. delete.go (19)
  14. display.go (72)
  15. expiry.go (25)
  16. expiry/expiry.go (13)
  17. fileserve.go (62)
  18. helpers/archive.go (70)
  19. helpers/helpers.go (67)
  20. httputil/LICENSE (27)
  21. httputil/conditional.go (218)
  22. linx-cleanup/cleanup.go (46)
  23. meta.go (222)
  24. pages.go (46)
  25. server.go (67)
  26. server_test.go (206)
  27. shorturl.go (89)
  28. static/css/dropzone.css (10)
  29. static/css/github-markdown.css (3)
  30. static/css/linx.css (205)
  31. static/js/bin.js (6)
  32. static/js/shorturl.js (39)
  33. static/js/upload.js (171)
  34. templates.go (1)
  35. templates/404.html (4)
  36. templates/API.html (16)
  37. templates/base.html (3)
  38. templates/display/audio.html (4)
  39. templates/display/base.html (22)
  40. templates/display/bin.html (37)
  41. templates/display/file.html (4)
  42. templates/display/image.html (4)
  43. templates/display/pdf.html (4)
  44. templates/display/story.html (34)
  45. templates/display/video.html (4)
  46. templates/index.html (4)
  47. templates/paste.html (20)
  48. torrent.go (67)
  49. torrent/torrent.go (28)
  50. torrent_test.go (7)
  51. upload.go (143)

2
.gitignore

@@ -29,3 +29,5 @@ _testmain.go
linx-server
files/
meta/
binaries/
linx-cleanup

4
.travis.yml

@@ -1,8 +1,8 @@
language: go
go:
- 1.5
- 1.6
- "1.10"
- "1.11"
before_script:
- go vet ./...

23
Dockerfile

@@ -1,13 +1,28 @@
FROM golang:alpine
FROM golang:alpine3.8 AS build
COPY . /go/src/github.com/andreimarcu/linx-server
WORKDIR /go/src/github.com/andreimarcu/linx-server
RUN set -ex \
&& apk add --no-cache --virtual .build-deps git mercurial \
&& go get github.com/andreimarcu/linx-server \
&& apk add --no-cache --virtual .build-deps git \
&& go get -v . \
&& apk del .build-deps
FROM alpine:3.8
COPY --from=build /go/bin/linx-server /usr/local/bin/linx-server
ENV GOPATH /go
ENV SSL_CERT_FILE /etc/ssl/cert.pem
COPY static /go/src/github.com/andreimarcu/linx-server/static/
COPY templates /go/src/github.com/andreimarcu/linx-server/templates/
RUN mkdir -p /data/files && mkdir -p /data/meta && chown -R 65534:65534 /data
VOLUME ["/data/files", "/data/meta"]
EXPOSE 8080
USER nobody
ENTRYPOINT ["/go/bin/linx-server", "-bind=0.0.0.0:8080", "-filespath=/data/files/", "-metapath=/data/meta/"]
ENTRYPOINT ["/usr/local/bin/linx-server", "-bind=0.0.0.0:8080", "-filespath=/data/files/", "-metapath=/data/meta/"]
CMD ["-sitename=linx", "-allowhotlink"]

42
README.md

@@ -16,8 +16,7 @@ Self-hosted file/media sharing website.

### Screenshots

<img width="230" src="https://cloud.githubusercontent.com/assets/4650950/10530123/4211e946-7372-11e5-9cb5-9956c5c49d95.png" /> <img width="230" src="https://cloud.githubusercontent.com/assets/4650950/10530124/4217db8a-7372-11e5-957d-b3abb873dc80.png" />
<img width="230" src="https://cloud.githubusercontent.com/assets/4650950/10530844/48d6d4e2-7379-11e5-8886-d4c32c416cbc.png" /> <img width="230" src="https://cloud.githubusercontent.com/assets/4650950/10530845/48dc9ae4-7379-11e5-9e59-959f7c40a573.png" /> <img width="230" src="https://cloud.githubusercontent.com/assets/4650950/10530846/48df08ec-7379-11e5-89f6-5c3f6372384d.png" />
<img width="200" src="https://user-images.githubusercontent.com/4650950/51735725-0033cf00-203d-11e9-8a97-f543330a92ec.png" /> <img width="200" src="https://user-images.githubusercontent.com/4650950/51735724-0033cf00-203d-11e9-8fe0-77442eaa8705.png" /> <img width="200" src="https://user-images.githubusercontent.com/4650950/51735726-0033cf00-203d-11e9-9fca-095a97e46ce8.png" /> <img width="200" src="https://user-images.githubusercontent.com/4650950/51735728-0033cf00-203d-11e9-90e9-4f2d36332fc4.png" />

Get release and run
@@ -41,18 +40,28 @@ allowhotlink = true

#### Options

- ```-bind 127.0.0.1:8080``` -- what to bind to (default is 127.0.0.1:8080)
- ```-sitename myLinx``` -- the site name displayed on top (default is inferred from Host header)
- ```-siteurl "http://mylinx.example.org/"``` -- the site url (default is inferred from execution context)
- ```-filespath files/``` -- Path to store uploads (default is files/)
- ```-metapath meta/``` -- Path to store information about uploads (default is meta/)
- ```-siteurl "https://mylinx.example.org/"``` -- the site url (default is inferred from execution context)
- ```-selifpath "selif"``` -- path relative to site base url (the "selif" in mylinx.example.org/selif/image.jpg) where files are accessed directly (default: selif)
- ```-maxsize 4294967296``` -- maximum upload file size in bytes (default 4GB)
- ```-maxexpiry 86400``` -- maximum expiration time in seconds (default is 0, which is no expiry)
- ```-allowhotlink``` -- Allow file hotlinking
- ```-contentsecuritypolicy "..."``` -- Content-Security-Policy header for pages (default is "default-src 'self'; img-src 'self' data:; style-src 'self' 'unsafe-inline'; frame-ancestors 'self'; referrer origin;")
- ```-filecontentsecuritypolicy "..."``` -- Content-Security-Policy header for files (default is "default-src 'none'; img-src 'self'; object-src 'self'; media-src 'self'; style-src 'self' 'unsafe-inline'; frame-ancestors 'self'; referrer origin;")
- ```-contentsecuritypolicy "..."``` -- Content-Security-Policy header for pages (default is "default-src 'self'; img-src 'self' data:; style-src 'self' 'unsafe-inline'; frame-ancestors 'self';")
- ```-filecontentsecuritypolicy "..."``` -- Content-Security-Policy header for files (default is "default-src 'none'; img-src 'self'; object-src 'self'; media-src 'self'; style-src 'self' 'unsafe-inline'; frame-ancestors 'self';")
- ```-refererpolicy "..."``` -- Referrer-Policy header for pages (default is "same-origin")
- ```-filereferrerpolicy "..."``` -- Referrer-Policy header for files (default is "same-origin")
- ```-xframeoptions "..." ``` -- X-Frame-Options header (default is "SAMEORIGIN")
- ```-remoteuploads``` -- (optionally) enable remote uploads (/upload?url=https://...)
- ```-nologs``` -- (optionally) disable request logs in stdout
- ```-googleapikey``` -- (optionally) API Key for Google's URL Shortener. ([How to create one](https://developers.google.com/url-shortener/v1/getting_started#APIKey))
- ```-force-random-filename``` -- (optionally) force the use of random filenames

#### Storage backends

The following storage backends are available:

|Name|Notes|Options|
|----|-----|-------|
|LocalFS|Enabled by default, this backend uses the filesystem|```-filespath files/``` -- Path to store uploads (default is files/)<br />```-metapath meta/``` -- Path to store information about uploads (default is meta/)|
|S3|Use with any S3-compatible provider.<br> This implementation will stream files through the linx instance (every download will request and stream the file from the S3 bucket).<br><br>For high-traffic environments, one might consider using an external caching layer such as described [in this article](https://blog.sentry.io/2017/03/01/dodging-s3-downtime-with-nginx-and-haproxy.html).|```-s3-endpoint https://...``` -- S3 endpoint<br>```-s3-region us-east-1``` -- S3 region<br>```-s3-bucket mybucket``` -- S3 bucket to use for files and metadata<br>```-s3-force-path-style``` (optional) -- force path-style addressing (e.g. https://<span></span>s3.amazonaws.com/linx/example.txt)<br><br>Environment variables to provide:<br>```AWS_ACCESS_KEY_ID``` -- the S3 access key<br>```AWS_SECRET_ACCESS_KEY``` -- the S3 secret key<br>```AWS_SESSION_TOKEN``` (optional) -- the S3 session token|
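A minimal Go sketch of how this backend choice plays out in code, mirroring the `setup()` change to server.go later in this diff; the `chooseBackend` helper and its parameters are illustrative, not part of the project:

```go
package example

import (
	"github.com/andreimarcu/linx-server/backends"
	"github.com/andreimarcu/linx-server/backends/localfs"
	"github.com/andreimarcu/linx-server/backends/s3"
)

// chooseBackend mirrors the selection added to setup(): a non-empty
// -s3-bucket switches storage to S3, otherwise uploads and metadata
// live on the local filesystem.
func chooseBackend(s3Bucket, s3Region, s3Endpoint string, forcePathStyle bool, metaDir, filesDir string) backends.StorageBackend {
	if s3Bucket != "" {
		return s3.NewS3Backend(s3Bucket, s3Region, s3Endpoint, forcePathStyle)
	}
	return localfs.NewLocalfsBackend(metaDir, filesDir)
}
```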
#### SSL with built-in server

- ```-certfile path/to/your.crt``` -- Path to the ssl certificate (required if you want to use the https server)

@@ -71,6 +80,23 @@ allowhotlink = true

A helper utility ```linx-genkey``` is provided which hashes keys to the format required in the auth files.

Cleaning up expired files
-------------------------
When files expire, access is disabled immediately, but the files and metadata
will persist on disk until someone attempts to access them. If you'd like to
automatically clean up files that have expired, you can use the included
`linx-cleanup` utility. To run it automatically, use a cronjob or similar type
of scheduled task.

You should be careful to ensure that only one instance of `linx-cleanup` runs at
a time to avoid unexpected behavior. It does not implement any type of locking.

#### Options
- ```-filespath files/``` -- Path to stored uploads (default is files/)
- ```-metapath meta/``` -- Path to stored information about uploads (default is meta/)
- ```-nologs``` -- (optionally) disable deletion logs in stdout

Deployment
----------
Linx-server supports being deployed in a subdirectory (ie. example.com/mylinx/) as well as on its own (example.com/).

23
backends/backends.go

@ -1,23 +0,0 @@
package backends
import (
"io"
"net/http"
)
type ReadSeekCloser interface {
io.Reader
io.Closer
io.Seeker
io.ReaderAt
}
type StorageBackend interface {
Delete(key string) error
Exists(key string) (bool, error)
Get(key string) ([]byte, error)
Put(key string, r io.Reader) (int64, error)
Open(key string) (ReadSeekCloser, error)
ServeFile(key string, w http.ResponseWriter, r *http.Request)
Size(key string) (int64, error)
}

156
backends/localfs/localfs.go

@ -1,63 +1,149 @@
package localfs package localfs
import ( import (
"errors"
"encoding/json"
"io" "io"
"io/ioutil" "io/ioutil"
"net/http"
"os" "os"
"path" "path"
"time"
"github.com/andreimarcu/linx-server/backends" "github.com/andreimarcu/linx-server/backends"
"github.com/andreimarcu/linx-server/helpers"
) )
type LocalfsBackend struct { type LocalfsBackend struct {
basePath string
metaPath string
filesPath string
} }
func (b LocalfsBackend) Delete(key string) error {
return os.Remove(path.Join(b.basePath, key))
type MetadataJSON struct {
DeleteKey string `json:"delete_key"`
Sha256sum string `json:"sha256sum"`
Mimetype string `json:"mimetype"`
Size int64 `json:"size"`
Expiry int64 `json:"expiry"`
ArchiveFiles []string `json:"archive_files,omitempty"`
}
func (b LocalfsBackend) Delete(key string) (err error) {
err = os.Remove(path.Join(b.filesPath, key))
if err != nil {
return
}
err = os.Remove(path.Join(b.metaPath, key))
return
} }
func (b LocalfsBackend) Exists(key string) (bool, error) { func (b LocalfsBackend) Exists(key string) (bool, error) {
_, err := os.Stat(path.Join(b.basePath, key))
_, err := os.Stat(path.Join(b.filesPath, key))
return err == nil, err return err == nil, err
} }
func (b LocalfsBackend) Get(key string) ([]byte, error) {
return ioutil.ReadFile(path.Join(b.basePath, key))
func (b LocalfsBackend) Head(key string) (metadata backends.Metadata, err error) {
f, err := os.Open(path.Join(b.metaPath, key))
if os.IsNotExist(err) {
return metadata, backends.NotFoundErr
} else if err != nil {
return metadata, backends.BadMetadata
}
defer f.Close()
decoder := json.NewDecoder(f)
mjson := MetadataJSON{}
if err := decoder.Decode(&mjson); err != nil {
return metadata, backends.BadMetadata
}
metadata.DeleteKey = mjson.DeleteKey
metadata.Mimetype = mjson.Mimetype
metadata.ArchiveFiles = mjson.ArchiveFiles
metadata.Sha256sum = mjson.Sha256sum
metadata.Expiry = time.Unix(mjson.Expiry, 0)
metadata.Size = mjson.Size
return
} }
func (b LocalfsBackend) Put(key string, r io.Reader) (int64, error) {
dst, err := os.Create(path.Join(b.basePath, key))
func (b LocalfsBackend) Get(key string) (metadata backends.Metadata, f io.ReadCloser, err error) {
metadata, err = b.Head(key)
if err != nil { if err != nil {
return 0, err
return
}
f, err = os.Open(path.Join(b.filesPath, key))
if err != nil {
return
}
return
}
func (b LocalfsBackend) writeMetadata(key string, metadata backends.Metadata) error {
metaPath := path.Join(b.metaPath, key)
mjson := MetadataJSON{
DeleteKey: metadata.DeleteKey,
Mimetype: metadata.Mimetype,
ArchiveFiles: metadata.ArchiveFiles,
Sha256sum: metadata.Sha256sum,
Expiry: metadata.Expiry.Unix(),
Size: metadata.Size,
}
dst, err := os.Create(metaPath)
if err != nil {
return err
}
defer dst.Close()
encoder := json.NewEncoder(dst)
err = encoder.Encode(mjson)
if err != nil {
os.Remove(metaPath)
return err
}
return nil
}
func (b LocalfsBackend) Put(key string, r io.Reader, expiry time.Time, deleteKey string) (m backends.Metadata, err error) {
filePath := path.Join(b.filesPath, key)
dst, err := os.Create(filePath)
if err != nil {
return
} }
defer dst.Close() defer dst.Close()
bytes, err := io.Copy(dst, r) bytes, err := io.Copy(dst, r)
if bytes == 0 { if bytes == 0 {
b.Delete(key)
return bytes, errors.New("Empty file")
os.Remove(filePath)
return m, backends.FileEmptyError
} else if err != nil { } else if err != nil {
b.Delete(key)
return bytes, err
os.Remove(filePath)
return m, err
} }
return bytes, err
}
m.Expiry = expiry
m.DeleteKey = deleteKey
m.Size = bytes
m.Mimetype, _ = helpers.DetectMime(dst)
m.Sha256sum, _ = helpers.Sha256sum(dst)
m.ArchiveFiles, _ = helpers.ListArchiveFiles(m.Mimetype, m.Size, dst)
func (b LocalfsBackend) Open(key string) (backends.ReadSeekCloser, error) {
return os.Open(path.Join(b.basePath, key))
}
err = b.writeMetadata(key, m)
if err != nil {
os.Remove(filePath)
return
}
func (b LocalfsBackend) ServeFile(key string, w http.ResponseWriter, r *http.Request) {
filePath := path.Join(b.basePath, key)
http.ServeFile(w, r, filePath)
return
} }
func (b LocalfsBackend) Size(key string) (int64, error) { func (b LocalfsBackend) Size(key string) (int64, error) {
fileInfo, err := os.Stat(path.Join(b.basePath, key))
fileInfo, err := os.Stat(path.Join(b.filesPath, key))
if err != nil { if err != nil {
return 0, err return 0, err
} }
@ -65,6 +151,24 @@ func (b LocalfsBackend) Size(key string) (int64, error) {
return fileInfo.Size(), nil return fileInfo.Size(), nil
} }
func NewLocalfsBackend(basePath string) LocalfsBackend {
return LocalfsBackend{basePath: basePath}
func (b LocalfsBackend) List() ([]string, error) {
var output []string
files, err := ioutil.ReadDir(b.filesPath)
if err != nil {
return nil, err
}
for _, file := range files {
output = append(output, file.Name())
}
return output, nil
}
func NewLocalfsBackend(metaPath string, filesPath string) LocalfsBackend {
return LocalfsBackend{
metaPath: metaPath,
filesPath: filesPath,
}
} }
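A small usage sketch for the reworked constructor and the Put/Head pair; the directory names, key, and delete key are placeholder values, and the meta/ and files/ directories are assumed to already exist:

```go
package main

import (
	"fmt"
	"strings"
	"time"

	"github.com/andreimarcu/linx-server/backends/localfs"
	"github.com/andreimarcu/linx-server/expiry"
)

// A minimal round trip against the reworked localfs backend: Put writes the
// file plus a JSON metadata sidecar, Head reads the sidecar back.
func main() {
	backend := localfs.NewLocalfsBackend("meta", "files")

	m, err := backend.Put("mykey", strings.NewReader("hello world"), expiry.NeverExpire, "s3cr3t")
	if err != nil {
		panic(err)
	}
	fmt.Println(m.Mimetype, m.Size, m.Sha256sum)

	// Head re-reads the JSON metadata written next to the file.
	again, err := backend.Head("mykey")
	if err != nil {
		panic(err)
	}
	fmt.Println(again.Expiry.Equal(time.Unix(0, 0))) // true: never expires
}
```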

17
backends/meta.go

@ -0,0 +1,17 @@
package backends
import (
"errors"
"time"
)
type Metadata struct {
DeleteKey string
Sha256sum string
Mimetype string
Size int64
Expiry time.Time
ArchiveFiles []string
}
var BadMetadata = errors.New("Corrupted metadata.")

195
backends/s3/s3.go

@ -0,0 +1,195 @@
package s3
import (
"io"
"io/ioutil"
"os"
"strconv"
"time"
"github.com/andreimarcu/linx-server/backends"
"github.com/andreimarcu/linx-server/helpers"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/aws/aws-sdk-go/service/s3/s3manager"
)
type S3Backend struct {
bucket string
svc *s3.S3
}
func (b S3Backend) Delete(key string) error {
_, err := b.svc.DeleteObject(&s3.DeleteObjectInput{
Bucket: aws.String(b.bucket),
Key: aws.String(key),
})
if err != nil {
return err
}
return nil
}
func (b S3Backend) Exists(key string) (bool, error) {
_, err := b.svc.HeadObject(&s3.HeadObjectInput{
Bucket: aws.String(b.bucket),
Key: aws.String(key),
})
return err == nil, err
}
func (b S3Backend) Head(key string) (metadata backends.Metadata, err error) {
var result *s3.HeadObjectOutput
result, err = b.svc.HeadObject(&s3.HeadObjectInput{
Bucket: aws.String(b.bucket),
Key: aws.String(key),
})
if err != nil {
if aerr, ok := err.(awserr.Error); ok {
if aerr.Code() == s3.ErrCodeNoSuchKey || aerr.Code() == "NotFound" {
err = backends.NotFoundErr
}
}
return
}
metadata, err = unmapMetadata(result.Metadata)
return
}
func (b S3Backend) Get(key string) (metadata backends.Metadata, r io.ReadCloser, err error) {
var result *s3.GetObjectOutput
result, err = b.svc.GetObject(&s3.GetObjectInput{
Bucket: aws.String(b.bucket),
Key: aws.String(key),
})
if err != nil {
if aerr, ok := err.(awserr.Error); ok {
if aerr.Code() == s3.ErrCodeNoSuchKey || aerr.Code() == "NotFound" {
err = backends.NotFoundErr
}
}
return
}
metadata, err = unmapMetadata(result.Metadata)
r = result.Body
return
}
func mapMetadata(m backends.Metadata) map[string]*string {
return map[string]*string{
"Expiry": aws.String(strconv.FormatInt(m.Expiry.Unix(), 10)),
"Delete_key": aws.String(m.DeleteKey),
"Size": aws.String(strconv.FormatInt(m.Size, 10)),
"Mimetype": aws.String(m.Mimetype),
"Sha256sum": aws.String(m.Sha256sum),
}
}
func unmapMetadata(input map[string]*string) (m backends.Metadata, err error) {
expiry, err := strconv.ParseInt(aws.StringValue(input["Expiry"]), 10, 64)
if err != nil {
return m, err
}
m.Expiry = time.Unix(expiry, 0)
m.Size, err = strconv.ParseInt(aws.StringValue(input["Size"]), 10, 64)
if err != nil {
return
}
m.DeleteKey = aws.StringValue(input["Delete_key"])
m.Mimetype = aws.StringValue(input["Mimetype"])
m.Sha256sum = aws.StringValue(input["Sha256sum"])
return
}
func (b S3Backend) Put(key string, r io.Reader, expiry time.Time, deleteKey string) (m backends.Metadata, err error) {
tmpDst, err := ioutil.TempFile("", "linx-server-upload")
if err != nil {
return m, err
}
defer tmpDst.Close()
defer os.Remove(tmpDst.Name())
bytes, err := io.Copy(tmpDst, r)
if bytes == 0 {
return m, backends.FileEmptyError
} else if err != nil {
return m, err
}
m.Expiry = expiry
m.DeleteKey = deleteKey
m.Size = bytes
m.Mimetype, _ = helpers.DetectMime(tmpDst)
m.Sha256sum, _ = helpers.Sha256sum(tmpDst)
// XXX: we may not be able to write this to AWS easily
//m.ArchiveFiles, _ = helpers.ListArchiveFiles(m.Mimetype, m.Size, tmpDst)
uploader := s3manager.NewUploaderWithClient(b.svc)
input := &s3manager.UploadInput{
Bucket: aws.String(b.bucket),
Key: aws.String(key),
Body: tmpDst,
Metadata: mapMetadata(m),
}
_, err = uploader.Upload(input)
if err != nil {
return
}
return
}
func (b S3Backend) Size(key string) (int64, error) {
input := &s3.HeadObjectInput{
Bucket: aws.String(b.bucket),
Key: aws.String(key),
}
result, err := b.svc.HeadObject(input)
if err != nil {
return 0, err
}
return *result.ContentLength, nil
}
func (b S3Backend) List() ([]string, error) {
var output []string
input := &s3.ListObjectsInput{
Bucket: aws.String(b.bucket),
}
results, err := b.svc.ListObjects(input)
if err != nil {
return nil, err
}
for _, object := range results.Contents {
output = append(output, *object.Key)
}
return output, nil
}
func NewS3Backend(bucket string, region string, endpoint string, forcePathStyle bool) S3Backend {
awsConfig := &aws.Config{}
if region != "" {
awsConfig.Region = aws.String(region)
}
if endpoint != "" {
awsConfig.Endpoint = aws.String(endpoint)
}
if forcePathStyle == true {
awsConfig.S3ForcePathStyle = aws.Bool(true)
}
sess := session.Must(session.NewSession(awsConfig))
svc := s3.New(sess)
return S3Backend{bucket: bucket, svc: svc}
}
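Construction is the only configurable part of this backend; a brief sketch, assuming placeholder endpoint/region/bucket values and credentials supplied through the standard AWS environment variables listed in the README above:

```go
package main

import (
	"log"

	"github.com/andreimarcu/linx-server/backends/s3"
)

// Construction sketch for the S3 backend; the endpoint, region, and bucket
// are placeholders, and credentials come from AWS_ACCESS_KEY_ID /
// AWS_SECRET_ACCESS_KEY (and optionally AWS_SESSION_TOKEN).
func main() {
	backend := s3.NewS3Backend("mybucket", "us-east-1", "https://s3.example.com", true)

	// List is used by linx-cleanup to walk every stored key.
	keys, err := backend.List()
	if err != nil {
		log.Fatal(err)
	}
	for _, key := range keys {
		log.Println(key)
	}
}
```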

24
backends/storage.go

@ -0,0 +1,24 @@
package backends
import (
"errors"
"io"
"time"
)
type StorageBackend interface {
Delete(key string) error
Exists(key string) (bool, error)
Head(key string) (Metadata, error)
Get(key string) (Metadata, io.ReadCloser, error)
Put(key string, r io.Reader, expiry time.Time, deleteKey string) (Metadata, error)
Size(key string) (int64, error)
}
type MetaStorageBackend interface {
StorageBackend
List() ([]string, error)
}
var NotFoundErr = errors.New("File not found.")
var FileEmptyError = errors.New("Empty file")
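As a rough illustration of the call pattern this interface expects (Head for metadata and an expiry check, then Get to stream the body), here is a hedged sketch; `serveFromBackend` is a hypothetical helper, not part of the codebase, but it follows what fileserve.go later in this diff does:

```go
package example

import (
	"io"

	"github.com/andreimarcu/linx-server/backends"
	"github.com/andreimarcu/linx-server/expiry"
)

// serveFromBackend shows the intended call order for the new interface:
// Head for metadata and an expiry check, then Get to stream the contents.
func serveFromBackend(sb backends.StorageBackend, key string, w io.Writer) error {
	metadata, err := sb.Head(key)
	if err != nil {
		return err // backends.NotFoundErr when the key does not exist
	}
	if expiry.IsTsExpired(metadata.Expiry) {
		return backends.NotFoundErr // expired files are treated as missing
	}
	_, r, err := sb.Get(key)
	if err != nil {
		return err
	}
	defer r.Close()
	_, err = io.CopyN(w, r, metadata.Size)
	return err
}
```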

117
build.sh

@ -1,66 +1,67 @@
#!/bin/bash #!/bin/bash
version="$1"
mkdir -p "binairies/""$version"
name="binairies/""$version""/linx-server-v""$version""_"
GOOS=darwin GOARCH=amd64 go build -o "$name"osx-amd64
rice append --exec "$name"osx-amd64
GOOS=darwin GOARCH=386 go build -o "$name"osx-386
rice append --exec "$name"osx-386
GOOS=freebsd GOARCH=amd64 go build -o "$name"freebsd-amd64
rice append --exec "$name"freebsd-amd64
GOOS=freebsd GOARCH=386 go build -o "$name"freebsd-386
rice append --exec "$name"freebsd-386
GOOS=openbsd GOARCH=amd64 go build -o "$name"openbsd-amd64
rice append --exec "$name"openbsd-amd64
function build_binary_rice {
name="$1"
for arch in amd64 386; do
GOOS=darwin GOARCH=$arch go build -o "$name"osx-$arch
rice append --exec "$name"osx-$arch
done
for arch in amd64 386; do
GOOS=freebsd GOARCH=$arch go build -o "$name"freebsd-$arch
rice append --exec "$name"freebsd-$arch
done
for arch in amd64 386; do
GOOS=openbsd GOARCH=$arch go build -o "$name"openbsd-$arch
rice append --exec "$name"openbsd-$arch
done
for arch in arm arm64 amd64 386; do
GOOS=linux GOARCH=$arch go build -o "$name"linux-$arch
rice append --exec "$name"linux-$arch
done
for arch in amd64 386; do
GOOS=windows GOARCH=$arch go build -o "$name"windows-$arch.exe
rice append --exec "$name"windows-$arch.exe
done
}
function build_binary {
name="$1"
for arch in amd64 386; do
GOOS=darwin GOARCH=$arch go build -o "$name"osx-$arch
done
for arch in amd64 386; do
GOOS=freebsd GOARCH=$arch go build -o "$name"freebsd-$arch
done
for arch in amd64 386; do
GOOS=openbsd GOARCH=$arch go build -o "$name"openbsd-$arch
done
for arch in arm arm64 amd64 386; do
GOOS=linux GOARCH=$arch go build -o "$name"linux-$arch
done
for arch in amd64 386; do
GOOS=windows GOARCH=$arch go build -o "$name"windows-$arch.exe
done
}
GOOS=openbsd GOARCH=386 go build -o "$name"openbsd-386
rice append --exec "$name"openbsd-386
GOOS=linux GOARCH=arm go build -o "$name"linux-arm
rice append --exec "$name"linux-arm
GOOS=linux GOARCH=amd64 go build -o "$name"linux-amd64
rice append --exec "$name"linux-amd64
GOOS=linux GOARCH=386 go build -o "$name"linux-386
rice append --exec "$name"linux-386
GOOS=windows GOARCH=amd64 go build -o "$name"windows-amd64.exe
rice append --exec "$name"windows-amd64.exe
GOOS=windows GOARCH=386 go build -o "$name"windows-386.exe
rice append --exec "$name"windows-386.exe
version="$1"
mkdir -p "binaries/""$version"
build_binary_rice "binaries/""$version""/linx-server-v""$version""_"
cd linx-genkey cd linx-genkey
name="../binairies/""$version""/linx-genkey-v""$version""_"
GOOS=darwin GOARCH=amd64 go build -o "$name"osx-amd64
GOOS=darwin GOARCH=386 go build -o "$name"osx-386
GOOS=freebsd GOARCH=amd64 go build -o "$name"freebsd-amd64
GOOS=freebsd GOARCH=386 go build -o "$name"freebsd-386
GOOS=openbsd GOARCH=amd64 go build -o "$name"openbsd-amd64
GOOS=openbsd GOARCH=386 go build -o "$name"openbsd-386
GOOS=linux GOARCH=arm go build -o "$name"linux-arm
GOOS=linux GOARCH=amd64 go build -o "$name"linux-amd64
GOOS=linux GOARCH=386 go build -o "$name"linux-386
GOOS=windows GOARCH=amd64 go build -o "$name"windows-amd64.exe
GOOS=windows GOARCH=386 go build -o "$name"windows-386.exe
build_binary "../binaries/""$version""/linx-genkey-v""$version""_"
cd ..
cd linx-cleanup
build_binary "../binaries/""$version""/linx-cleanup-v""$version""_"
cd .. cd ..

11
csp.go

@@ -6,6 +6,7 @@ import (
const (
cspHeader = "Content-Security-Policy"
rpHeader = "Referrer-Policy"
frameOptionsHeader = "X-Frame-Options"
)
@@ -15,8 +16,9 @@ type csp struct {
}
type CSPOptions struct {
policy string
frame string
policy string
referrerPolicy string
frame string
}
func (c csp) ServeHTTP(w http.ResponseWriter, r *http.Request) {
@@ -25,6 +27,11 @@ func (c csp) ServeHTTP(w http.ResponseWriter, r *http.Request) {
w.Header().Add(cspHeader, c.opts.policy)
}
// only add a Referrer Policy if one is not already set
if existing := w.Header().Get(rpHeader); existing == "" {
w.Header().Add(rpHeader, c.opts.referrerPolicy)
}
w.Header().Set(frameOptionsHeader, c.opts.frame)
c.h.ServeHTTP(w, r)

12
csp_test.go

@@ -12,6 +12,7 @@ import (
var testCSPHeaders = map[string]string{
"Content-Security-Policy": "default-src 'none'; style-src 'self';",
"Referrer-Policy": "strict-origin-when-cross-origin",
"X-Frame-Options": "SAMEORIGIN",
}
@@ -22,8 +23,10 @@ func TestContentSecurityPolicy(t *testing.T) {
Config.maxSize = 1024 * 1024 * 1024
Config.noLogs = true
Config.siteName = "linx"
Config.contentSecurityPolicy = "default-src 'none'; style-src 'self';"
Config.xFrameOptions = "SAMEORIGIN"
Config.selifPath = "selif"
Config.contentSecurityPolicy = testCSPHeaders["Content-Security-Policy"]
Config.referrerPolicy = testCSPHeaders["Referrer-Policy"]
Config.xFrameOptions = testCSPHeaders["X-Frame-Options"]
mux := setup()
w := httptest.NewRecorder()
@@ -34,8 +37,9 @@
}
goji.Use(ContentSecurityPolicy(CSPOptions{
policy: testCSPHeaders["Content-Security-Policy"],
frame: testCSPHeaders["X-Frame-Options"],
policy: testCSPHeaders["Content-Security-Policy"],
referrerPolicy: testCSPHeaders["Referrer-Policy"],
frame: testCSPHeaders["X-Frame-Options"],
}))
mux.ServeHTTP(w, req)

19
delete.go

@@ -3,8 +3,8 @@ package main
import (
"fmt"
"net/http"
"os"
"github.com/andreimarcu/linx-server/backends"
"github.com/zenazn/goji/web"
)
@@ -13,24 +13,19 @@ func deleteHandler(c web.C, w http.ResponseWriter, r *http.Request) {
filename := c.URLParams["name"]
// Ensure requested file actually exists
if _, readErr := fileBackend.Exists(filename); os.IsNotExist(readErr) {
// Ensure that file exists and delete key is correct
metadata, err := storageBackend.Head(filename)
if err == backends.NotFoundErr {
notFoundHandler(c, w, r) // 404 - file doesn't exist
return
}
// Ensure delete key is correct
metadata, err := metadataRead(filename)
if err != nil {
} else if err != nil {
unauthorizedHandler(c, w, r) // 401 - no metadata available
return
}
if metadata.DeleteKey == requestKey {
fileDelErr := fileBackend.Delete(filename)
metaDelErr := metaBackend.Delete(filename)
if (fileDelErr != nil) || (metaDelErr != nil) {
err := storageBackend.Delete(filename)
if err != nil {
oopsHandler(c, w, r, RespPLAIN, "Could not delete")
return
}

72
display.go

@ -2,12 +2,16 @@ package main
import ( import (
"encoding/json" "encoding/json"
"io/ioutil"
"net/http" "net/http"
"path/filepath" "path/filepath"
"regexp"
"strconv" "strconv"
"strings" "strings"
"time" "time"
"github.com/andreimarcu/linx-server/backends"
"github.com/andreimarcu/linx-server/expiry"
"github.com/dustin/go-humanize" "github.com/dustin/go-humanize"
"github.com/flosch/pongo2" "github.com/flosch/pongo2"
"github.com/microcosm-cc/bluemonday" "github.com/microcosm-cc/bluemonday"
@ -17,22 +21,26 @@ import (
const maxDisplayFileSizeBytes = 1024 * 512 const maxDisplayFileSizeBytes = 1024 * 512
var cliUserAgentRe = regexp.MustCompile("(?i)(lib)?curl|wget")
func fileDisplayHandler(c web.C, w http.ResponseWriter, r *http.Request) { func fileDisplayHandler(c web.C, w http.ResponseWriter, r *http.Request) {
if !Config.noDirectAgents && cliUserAgentRe.MatchString(r.Header.Get("User-Agent")) && !strings.EqualFold("application/json", r.Header.Get("Accept")) {
fileServeHandler(c, w, r)
return
}
fileName := c.URLParams["name"] fileName := c.URLParams["name"]
err := checkFile(fileName)
if err == NotFoundErr {
metadata, err := checkFile(fileName)
if err == backends.NotFoundErr {
notFoundHandler(c, w, r) notFoundHandler(c, w, r)
return return
}
metadata, err := metadataRead(fileName)
if err != nil {
} else if err != nil {
oopsHandler(c, w, r, RespAUTO, "Corrupt metadata.") oopsHandler(c, w, r, RespAUTO, "Corrupt metadata.")
return return
} }
var expiryHuman string var expiryHuman string
if metadata.Expiry != neverExpire {
if metadata.Expiry != expiry.NeverExpire {
expiryHuman = humanize.RelTime(time.Now(), metadata.Expiry, "", "") expiryHuman = humanize.RelTime(time.Now(), metadata.Expiry, "", "")
} }
sizeHuman := humanize.Bytes(uint64(metadata.Size)) sizeHuman := humanize.Bytes(uint64(metadata.Size))
@ -43,11 +51,12 @@ func fileDisplayHandler(c web.C, w http.ResponseWriter, r *http.Request) {
if strings.EqualFold("application/json", r.Header.Get("Accept")) { if strings.EqualFold("application/json", r.Header.Get("Accept")) {
js, _ := json.Marshal(map[string]string{ js, _ := json.Marshal(map[string]string{
"filename": fileName,
"expiry": strconv.FormatInt(metadata.Expiry.Unix(), 10),
"size": strconv.FormatInt(metadata.Size, 10),
"mimetype": metadata.Mimetype,
"sha256sum": metadata.Sha256sum,
"filename": fileName,
"direct_url": getSiteURL(r) + Config.selifPath + fileName,
"expiry": strconv.FormatInt(metadata.Expiry.Unix(), 10),
"size": strconv.FormatInt(metadata.Size, 10),
"mimetype": metadata.Mimetype,
"sha256sum": metadata.Sha256sum,
}) })
w.Write(js) w.Write(js)
return return
@ -68,8 +77,13 @@ func fileDisplayHandler(c web.C, w http.ResponseWriter, r *http.Request) {
tpl = Templates["display/pdf.html"] tpl = Templates["display/pdf.html"]
} else if extension == "story" { } else if extension == "story" {
metadata, reader, err := storageBackend.Get(fileName)
if err != nil {
oopsHandler(c, w, r, RespHTML, err.Error())
}
if metadata.Size < maxDisplayFileSizeBytes { if metadata.Size < maxDisplayFileSizeBytes {
bytes, err := fileBackend.Get(fileName)
bytes, err := ioutil.ReadAll(reader)
if err == nil { if err == nil {
extra["contents"] = string(bytes) extra["contents"] = string(bytes)
lines = strings.Split(extra["contents"], "\n") lines = strings.Split(extra["contents"], "\n")
@ -78,8 +92,13 @@ func fileDisplayHandler(c web.C, w http.ResponseWriter, r *http.Request) {
} }
} else if extension == "md" { } else if extension == "md" {
metadata, reader, err := storageBackend.Get(fileName)
if err != nil {
oopsHandler(c, w, r, RespHTML, err.Error())
}
if metadata.Size < maxDisplayFileSizeBytes { if metadata.Size < maxDisplayFileSizeBytes {
bytes, err := fileBackend.Get(fileName)
bytes, err := ioutil.ReadAll(reader)
if err == nil { if err == nil {
unsafe := blackfriday.MarkdownCommon(bytes) unsafe := blackfriday.MarkdownCommon(bytes)
html := bluemonday.UGCPolicy().SanitizeBytes(unsafe) html := bluemonday.UGCPolicy().SanitizeBytes(unsafe)
@ -90,8 +109,13 @@ func fileDisplayHandler(c web.C, w http.ResponseWriter, r *http.Request) {
} }
} else if strings.HasPrefix(metadata.Mimetype, "text/") || supportedBinExtension(extension) { } else if strings.HasPrefix(metadata.Mimetype, "text/") || supportedBinExtension(extension) {
metadata, reader, err := storageBackend.Get(fileName)
if err != nil {
oopsHandler(c, w, r, RespHTML, err.Error())
}
if metadata.Size < maxDisplayFileSizeBytes { if metadata.Size < maxDisplayFileSizeBytes {
bytes, err := fileBackend.Get(fileName)
bytes, err := ioutil.ReadAll(reader)
if err == nil { if err == nil {
extra["extension"] = extension extra["extension"] = extension
extra["lang_hl"], extra["lang_ace"] = extensionToHlAndAceLangs(extension) extra["lang_hl"], extra["lang_ace"] = extensionToHlAndAceLangs(extension)
@ -107,15 +131,15 @@ func fileDisplayHandler(c web.C, w http.ResponseWriter, r *http.Request) {
} }
err = renderTemplate(tpl, pongo2.Context{ err = renderTemplate(tpl, pongo2.Context{
"mime": metadata.Mimetype,
"filename": fileName,
"size": sizeHuman,
"expiry": expiryHuman,
"extra": extra,
"lines": lines,
"files": metadata.ArchiveFiles,
"shorturlEnabled": Config.googleShorterAPIKey != "",
"shorturl": metadata.ShortURL,
"mime": metadata.Mimetype,
"filename": fileName,
"size": sizeHuman,
"expiry": expiryHuman,
"expirylist": listExpirationTimes(),
"extra": extra,
"forcerandom": Config.forceRandomFilename,
"lines": lines,
"files": metadata.ArchiveFiles,
}, r, w) }, r, w)
if err != nil { if err != nil {

25
expiry.go

@ -3,6 +3,7 @@ package main
import ( import (
"time" "time"
"github.com/andreimarcu/linx-server/expiry"
"github.com/dustin/go-humanize" "github.com/dustin/go-humanize"
) )
@ -21,22 +22,14 @@ type ExpirationTime struct {
Human string Human string
} }
var neverExpire = time.Unix(0, 0)
// Determine if a file with expiry set to "ts" has expired yet
func isTsExpired(ts time.Time) bool {
now := time.Now()
return ts != neverExpire && now.After(ts)
}
// Determine if the given filename is expired // Determine if the given filename is expired
func isFileExpired(filename string) (bool, error) { func isFileExpired(filename string) (bool, error) {
metadata, err := metadataRead(filename)
metadata, err := storageBackend.Head(filename)
if err != nil { if err != nil {
return false, err return false, err
} }
return isTsExpired(metadata.Expiry), nil
return expiry.IsTsExpired(metadata.Expiry), nil
} }
// Return a list of expiration times and their humanized versions // Return a list of expiration times and their humanized versions
@ -45,16 +38,16 @@ func listExpirationTimes() []ExpirationTime {
actualExpiryInList := false actualExpiryInList := false
var expiryList []ExpirationTime var expiryList []ExpirationTime
for _, expiry := range defaultExpiryList {
if Config.maxExpiry == 0 || expiry <= Config.maxExpiry {
if expiry == Config.maxExpiry {
for _, expiryEntry := range defaultExpiryList {
if Config.maxExpiry == 0 || expiryEntry <= Config.maxExpiry {
if expiryEntry == Config.maxExpiry {
actualExpiryInList = true actualExpiryInList = true
} }
duration := time.Duration(expiry) * time.Second
duration := time.Duration(expiryEntry) * time.Second
expiryList = append(expiryList, ExpirationTime{ expiryList = append(expiryList, ExpirationTime{
expiry,
humanize.RelTime(epoch, epoch.Add(duration), "", ""),
Seconds: expiryEntry,
Human: humanize.RelTime(epoch, epoch.Add(duration), "", ""),
}) })
} }
} }

13
expiry/expiry.go

@ -0,0 +1,13 @@
package expiry
import (
"time"
)
var NeverExpire = time.Unix(0, 0)
// Determine if a file with expiry set to "ts" has expired yet
func IsTsExpired(ts time.Time) bool {
now := time.Now()
return ts != NeverExpire && now.After(ts)
}
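A tiny check of the two cases the new package distinguishes (a past timestamp versus the NeverExpire sentinel):

```go
package main

import (
	"fmt"
	"time"

	"github.com/andreimarcu/linx-server/expiry"
)

// A past timestamp is expired; NeverExpire (the Unix epoch sentinel) never is.
func main() {
	fmt.Println(expiry.IsTsExpired(time.Now().Add(-time.Hour))) // true
	fmt.Println(expiry.IsTsExpired(expiry.NeverExpire))         // false
}
```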

62
fileserve.go

@ -1,21 +1,28 @@
package main package main
import ( import (
"fmt"
"io"
"net/http" "net/http"
"net/url" "net/url"
"strconv"
"strings" "strings"
"time"
"github.com/andreimarcu/linx-server/backends"
"github.com/andreimarcu/linx-server/expiry"
"github.com/andreimarcu/linx-server/httputil"
"github.com/zenazn/goji/web" "github.com/zenazn/goji/web"
) )
func fileServeHandler(c web.C, w http.ResponseWriter, r *http.Request) { func fileServeHandler(c web.C, w http.ResponseWriter, r *http.Request) {
fileName := c.URLParams["name"] fileName := c.URLParams["name"]
err := checkFile(fileName)
if err == NotFoundErr {
metadata, err := checkFile(fileName)
if err == backends.NotFoundErr {
notFoundHandler(c, w, r) notFoundHandler(c, w, r)
return return
} else if err == BadMetadata {
} else if err != nil {
oopsHandler(c, w, r, RespAUTO, "Corrupt metadata.") oopsHandler(c, w, r, RespAUTO, "Corrupt metadata.")
return return
} }
@ -31,8 +38,30 @@ func fileServeHandler(c web.C, w http.ResponseWriter, r *http.Request) {
} }
w.Header().Set("Content-Security-Policy", Config.fileContentSecurityPolicy) w.Header().Set("Content-Security-Policy", Config.fileContentSecurityPolicy)
w.Header().Set("Referrer-Policy", Config.fileReferrerPolicy)
fileBackend.ServeFile(fileName, w, r)
w.Header().Set("Content-Type", metadata.Mimetype)
w.Header().Set("Content-Length", strconv.FormatInt(metadata.Size, 10))
w.Header().Set("Etag", fmt.Sprintf("\"%s\"", metadata.Sha256sum))
w.Header().Set("Cache-Control", "public, no-cache")
modtime := time.Unix(0, 0)
if done := httputil.CheckPreconditions(w, r, modtime); done == true {
return
}
if r.Method != "HEAD" {
_, reader, err := storageBackend.Get(fileName)
if err != nil {
oopsHandler(c, w, r, RespAUTO, "Unable to open file.")
return
}
defer reader.Close()
if _, err = io.CopyN(w, reader, metadata.Size); err != nil {
oopsHandler(c, w, r, RespAUTO, err.Error())
}
}
} }
func staticHandler(c web.C, w http.ResponseWriter, r *http.Request) { func staticHandler(c web.C, w http.ResponseWriter, r *http.Request) {
@ -52,29 +81,24 @@ func staticHandler(c web.C, w http.ResponseWriter, r *http.Request) {
return return
} }
w.Header().Set("Etag", timeStartedStr)
w.Header().Set("Cache-Control", "max-age=86400")
w.Header().Set("Etag", fmt.Sprintf("\"%s\"", timeStartedStr))
w.Header().Set("Cache-Control", "public, max-age=86400")
http.ServeContent(w, r, filePath, timeStarted, file) http.ServeContent(w, r, filePath, timeStarted, file)
return return
} }
} }
func checkFile(filename string) error {
_, err := fileBackend.Exists(filename)
func checkFile(filename string) (metadata backends.Metadata, err error) {
metadata, err = storageBackend.Head(filename)
if err != nil { if err != nil {
return NotFoundErr
}
expired, err := isFileExpired(filename)
if err != nil {
return err
return
} }
if expired {
fileBackend.Delete(filename)
metaBackend.Delete(filename)
return NotFoundErr
if expiry.IsTsExpired(metadata.Expiry) {
storageBackend.Delete(filename)
err = backends.NotFoundErr
return
} }
return nil
return
} }

70
helpers/archive.go

@ -0,0 +1,70 @@
package helpers
import (
"archive/tar"
"archive/zip"
"compress/bzip2"
"compress/gzip"
"io"
"sort"
)
type ReadSeekerAt interface {
io.Reader
io.Seeker
io.ReaderAt
}
func ListArchiveFiles(mimetype string, size int64, r ReadSeekerAt) (files []string, err error) {
if mimetype == "application/x-tar" {
tReadr := tar.NewReader(r)
for {
hdr, err := tReadr.Next()
if err == io.EOF || err != nil {
break
}
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg {
files = append(files, hdr.Name)
}
}
sort.Strings(files)
} else if mimetype == "application/x-gzip" {
gzf, err := gzip.NewReader(r)
if err == nil {
tReadr := tar.NewReader(gzf)
for {
hdr, err := tReadr.Next()
if err == io.EOF || err != nil {
break
}
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg {
files = append(files, hdr.Name)
}
}
sort.Strings(files)
}
} else if mimetype == "application/x-bzip" {
bzf := bzip2.NewReader(r)
tReadr := tar.NewReader(bzf)
for {
hdr, err := tReadr.Next()
if err == io.EOF || err != nil {
break
}
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg {
files = append(files, hdr.Name)
}
}
sort.Strings(files)
} else if mimetype == "application/zip" {
zf, err := zip.NewReader(r, size)
if err == nil {
for _, f := range zf.File {
files = append(files, f.Name)
}
}
sort.Strings(files)
}
return
}
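For example, listing the members of a zip upload with the new helper; `bundle.zip` is a placeholder path, and the mimetype and size would normally come from the metadata recorded at upload time:

```go
package main

import (
	"fmt"
	"os"

	"github.com/andreimarcu/linx-server/helpers"
)

// Lists the entries of a zip file using the extracted archive helper.
// *os.File satisfies the ReadSeekerAt interface the helper expects.
func main() {
	f, err := os.Open("bundle.zip")
	if err != nil {
		panic(err)
	}
	defer f.Close()

	info, _ := f.Stat()
	files, err := helpers.ListArchiveFiles("application/zip", info.Size(), f)
	if err != nil {
		panic(err)
	}
	for _, name := range files {
		fmt.Println(name)
	}
}
```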

67
helpers/helpers.go

@ -0,0 +1,67 @@
package helpers
import (
"encoding/hex"
"io"
"unicode"
"github.com/minio/sha256-simd"
"gopkg.in/h2non/filetype.v1"
)
func DetectMime(r io.ReadSeeker) (string, error) {
// Get first 512 bytes for mimetype detection
header := make([]byte, 512)
r.Seek(0, 0)
r.Read(header)
r.Seek(0, 0)
kind, err := filetype.Match(header)
if err != nil {
return "application/octet-stream", err
} else if kind.MIME.Value != "" {
return kind.MIME.Value, nil
}
// Check if the file seems anything like text
if printable(header) {
return "text/plain", nil
} else {
return "application/octet-stream", nil
}
}
func Sha256sum(r io.ReadSeeker) (string, error) {
hasher := sha256.New()
r.Seek(0, 0)
_, err := io.Copy(hasher, r)
if err != nil {
return "", err
}
r.Seek(0, 0)
return hex.EncodeToString(hasher.Sum(nil)), nil
}
func printable(data []byte) bool {
for i, b := range data {
r := rune(b)
// A null terminator that's not at the beginning of the file
if r == 0 && i == 0 {
return false
} else if r == 0 && i < 0 {
continue
}
if r > unicode.MaxASCII {
return false
}
}
return true
}
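A usage sketch for the extracted helpers; `upload.bin` is a placeholder path. Both helpers rewind the reader before and after use, so they can be chained on one file handle:

```go
package main

import (
	"fmt"
	"os"

	"github.com/andreimarcu/linx-server/helpers"
)

// Detects the mimetype and computes the sha256 of an already-open file,
// the same two values the backends store in their metadata.
func main() {
	f, err := os.Open("upload.bin")
	if err != nil {
		panic(err)
	}
	defer f.Close()

	mimetype, _ := helpers.DetectMime(f)
	sum, _ := helpers.Sha256sum(f)
	fmt.Println(mimetype, sum)
}
```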

27
httputil/LICENSE

@ -0,0 +1,27 @@
Copyright (c) 2009 The Go Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

218
httputil/conditional.go

@ -0,0 +1,218 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// HTTP file system request handler
package httputil
import (
"net/http"
"net/textproto"
"strings"
"time"
)
// scanETag determines if a syntactically valid ETag is present at s. If so,
// the ETag and remaining text after consuming ETag is returned. Otherwise,
// it returns "", "".
func scanETag(s string) (etag string, remain string) {
s = textproto.TrimString(s)
start := 0
if strings.HasPrefix(s, "W/") {
start = 2
}
if len(s[start:]) < 2 || s[start] != '"' {
return "", ""
}
// ETag is either W/"text" or "text".
// See RFC 7232 2.3.
for i := start + 1; i < len(s); i++ {
c := s[i]
switch {
// Character values allowed in ETags.
case c == 0x21 || c >= 0x23 && c <= 0x7E || c >= 0x80:
case c == '"':
return s[:i+1], s[i+1:]
default:
return "", ""
}
}
return "", ""
}
// etagStrongMatch reports whether a and b match using strong ETag comparison.
// Assumes a and b are valid ETags.
func etagStrongMatch(a, b string) bool {
return a == b && a != "" && a[0] == '"'
}
// etagWeakMatch reports whether a and b match using weak ETag comparison.
// Assumes a and b are valid ETags.
func etagWeakMatch(a, b string) bool {
return strings.TrimPrefix(a, "W/") == strings.TrimPrefix(b, "W/")
}
// condResult is the result of an HTTP request precondition check.
// See https://tools.ietf.org/html/rfc7232 section 3.
type condResult int
const (
condNone condResult = iota
condTrue
condFalse
)
func checkIfMatch(w http.ResponseWriter, r *http.Request) condResult {
im := r.Header.Get("If-Match")
if im == "" {
return condNone
}
for {
im = textproto.TrimString(im)
if len(im) == 0 {
break
}
if im[0] == ',' {
im = im[1:]
continue
}
if im[0] == '*' {
return condTrue
}
etag, remain := scanETag(im)
if etag == "" {
break
}
if etagStrongMatch(etag, w.Header().Get("Etag")) {
return condTrue
}
im = remain
}
return condFalse
}
func checkIfUnmodifiedSince(r *http.Request, modtime time.Time) condResult {
ius := r.Header.Get("If-Unmodified-Since")
if ius == "" || isZeroTime(modtime) {
return condNone
}
if t, err := http.ParseTime(ius); err == nil {
// The Date-Modified header truncates sub-second precision, so
// use mtime < t+1s instead of mtime <= t to check for unmodified.
if modtime.Before(t.Add(1 * time.Second)) {
return condTrue
}
return condFalse
}
return condNone
}
func checkIfNoneMatch(w http.ResponseWriter, r *http.Request) condResult {
inm := r.Header.Get("If-None-Match")
if inm == "" {
return condNone
}
buf := inm
for {
buf = textproto.TrimString(buf)
if len(buf) == 0 {
break
}
if buf[0] == ',' {
buf = buf[1:]
}
if buf[0] == '*' {
return condFalse
}
etag, remain := scanETag(buf)
if etag == "" {
break
}
if etagWeakMatch(etag, w.Header().Get("Etag")) {
return condFalse
}
buf = remain
}
return condTrue
}
func checkIfModifiedSince(r *http.Request, modtime time.Time) condResult {
if r.Method != "GET" && r.Method != "HEAD" {
return condNone
}
ims := r.Header.Get("If-Modified-Since")
if ims == "" || isZeroTime(modtime) {
return condNone
}
t, err := http.ParseTime(ims)
if err != nil {
return condNone
}
// The Date-Modified header truncates sub-second precision, so
// use mtime < t+1s instead of mtime <= t to check for unmodified.
if modtime.Before(t.Add(1 * time.Second)) {
return condFalse
}
return condTrue
}
var unixEpochTime = time.Unix(0, 0)
// isZeroTime reports whether t is obviously unspecified (either zero or Unix()=0).
func isZeroTime(t time.Time) bool {
return t.IsZero() || t.Equal(unixEpochTime)
}
func setLastModified(w http.ResponseWriter, modtime time.Time) {
if !isZeroTime(modtime) {
w.Header().Set("Last-Modified", modtime.UTC().Format(http.TimeFormat))
}
}
func writeNotModified(w http.ResponseWriter) {
// RFC 7232 section 4.1:
// a sender SHOULD NOT generate representation metadata other than the
// above listed fields unless said metadata exists for the purpose of
// guiding cache updates (e.g., Last-Modified might be useful if the
// response does not have an ETag field).
h := w.Header()
delete(h, "Content-Type")
delete(h, "Content-Length")
if h.Get("Etag") != "" {
delete(h, "Last-Modified")
}
w.WriteHeader(http.StatusNotModified)
}
// CheckPreconditions evaluates request preconditions and reports whether a precondition
// resulted in sending StatusNotModified or StatusPreconditionFailed.
func CheckPreconditions(w http.ResponseWriter, r *http.Request, modtime time.Time) (done bool) {
// This function carefully follows RFC 7232 section 6.
ch := checkIfMatch(w, r)
if ch == condNone {
ch = checkIfUnmodifiedSince(r, modtime)
}
if ch == condFalse {
w.WriteHeader(http.StatusPreconditionFailed)
return true
}
switch checkIfNoneMatch(w, r) {
case condFalse:
if r.Method == "GET" || r.Method == "HEAD" {
writeNotModified(w)
return true
} else {
w.WriteHeader(http.StatusPreconditionFailed)
return true
}
case condNone:
if checkIfModifiedSince(r, modtime) == condFalse {
writeNotModified(w)
return true
}
}
return false
}
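A handler sketch showing how fileserve.go (earlier in this diff) drives the vendored helper: set the Etag first, then let CheckPreconditions answer 304/412 before any body is written. The route and Etag value are illustrative:

```go
package main

import (
	"fmt"
	"log"
	"net/http"
	"time"

	"github.com/andreimarcu/linx-server/httputil"
)

// handler sets validators, then defers conditional-request handling to the
// vendored CheckPreconditions before streaming anything.
func handler(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("Etag", `"deadbeef"`)
	if httputil.CheckPreconditions(w, r, time.Unix(0, 0)) {
		return // 304 Not Modified or 412 Precondition Failed was already sent
	}
	fmt.Fprintln(w, "file contents would be streamed here")
}

func main() {
	http.HandleFunc("/file", handler)
	log.Fatal(http.ListenAndServe("127.0.0.1:8080", nil))
}
```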

46
linx-cleanup/cleanup.go

@ -0,0 +1,46 @@
package main
import (
"flag"
"log"
"github.com/andreimarcu/linx-server/backends/localfs"
"github.com/andreimarcu/linx-server/expiry"
)
func main() {
var filesDir string
var metaDir string
var noLogs bool
flag.StringVar(&filesDir, "filespath", "files/",
"path to files directory")
flag.StringVar(&metaDir, "metapath", "meta/",
"path to metadata directory")
flag.BoolVar(&noLogs, "nologs", false,
"don't log deleted files")
flag.Parse()
fileBackend := localfs.NewLocalfsBackend(metaDir, filesDir)
files, err := fileBackend.List()
if err != nil {
panic(err)
}
for _, filename := range files {
metadata, err := fileBackend.Head(filename)
if err != nil {
if !noLogs {
log.Printf("Failed to find metadata for %s", filename)
}
}
if expiry.IsTsExpired(metadata.Expiry) {
if !noLogs {
log.Printf("Delete %s", filename)
}
fileBackend.Delete(filename)
}
}
}

222
meta.go

@ -1,222 +0,0 @@
package main
import (
"archive/tar"
"archive/zip"
"bytes"
"compress/bzip2"
"compress/gzip"
"crypto/sha256"
"encoding/hex"
"encoding/json"
"errors"
"io"
"sort"
"time"
"unicode"
"github.com/dchest/uniuri"
"gopkg.in/h2non/filetype.v1"
)
type MetadataJSON struct {
DeleteKey string `json:"delete_key"`
Sha256sum string `json:"sha256sum"`
Mimetype string `json:"mimetype"`
Size int64 `json:"size"`
Expiry int64 `json:"expiry"`
ArchiveFiles []string `json:"archive_files,omitempty"`
ShortURL string `json:"short_url"`
}
type Metadata struct {
DeleteKey string
Sha256sum string
Mimetype string
Size int64
Expiry time.Time
ArchiveFiles []string
ShortURL string
}
var NotFoundErr = errors.New("File not found.")
var BadMetadata = errors.New("Corrupted metadata.")
func generateMetadata(fName string, exp time.Time, delKey string) (m Metadata, err error) {
file, err := fileBackend.Open(fName)
if err != nil {
return
}
defer file.Close()
m.Size, err = fileBackend.Size(fName)
if err != nil {
return
}
m.Expiry = exp
if delKey == "" {
m.DeleteKey = uniuri.NewLen(30)
} else {
m.DeleteKey = delKey
}
// Get first 512 bytes for mimetype detection
header := make([]byte, 512)
file.Read(header)
kind, err := filetype.Match(header)
if err != nil {
m.Mimetype = "application/octet-stream"
} else {
m.Mimetype = kind.MIME.Value
}
if m.Mimetype == "" {
// Check if the file seems anything like text
if printable(header) {
m.Mimetype = "text/plain"
} else {
m.Mimetype = "application/octet-stream"
}
}
// Compute the sha256sum
hasher := sha256.New()
file.Seek(0, 0)
_, err = io.Copy(hasher, file)
if err == nil {
m.Sha256sum = hex.EncodeToString(hasher.Sum(nil))
}
file.Seek(0, 0)
// If archive, grab list of filenames
if m.Mimetype == "application/x-tar" {
tReadr := tar.NewReader(file)
for {
hdr, err := tReadr.Next()
if err == io.EOF || err != nil {
break
}
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg {
m.ArchiveFiles = append(m.ArchiveFiles, hdr.Name)
}
}
sort.Strings(m.ArchiveFiles)
} else if m.Mimetype == "application/x-gzip" {
gzf, err := gzip.NewReader(file)
if err == nil {
tReadr := tar.NewReader(gzf)
for {
hdr, err := tReadr.Next()
if err == io.EOF || err != nil {
break
}
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg {
m.ArchiveFiles = append(m.ArchiveFiles, hdr.Name)
}
}
sort.Strings(m.ArchiveFiles)
}
} else if m.Mimetype == "application/x-bzip" {
bzf := bzip2.NewReader(file)
tReadr := tar.NewReader(bzf)
for {
hdr, err := tReadr.Next()
if err == io.EOF || err != nil {
break
}
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg {
m.ArchiveFiles = append(m.ArchiveFiles, hdr.Name)
}
}
sort.Strings(m.ArchiveFiles)
} else if m.Mimetype == "application/zip" {
zf, err := zip.NewReader(file, m.Size)
if err == nil {
for _, f := range zf.File {
m.ArchiveFiles = append(m.ArchiveFiles, f.Name)
}
}
sort.Strings(m.ArchiveFiles)
}
return
}
func metadataWrite(filename string, metadata *Metadata) error {
mjson := MetadataJSON{}
mjson.DeleteKey = metadata.DeleteKey
mjson.Mimetype = metadata.Mimetype
mjson.ArchiveFiles = metadata.ArchiveFiles
mjson.Sha256sum = metadata.Sha256sum
mjson.Expiry = metadata.Expiry.Unix()
mjson.Size = metadata.Size
mjson.ShortURL = metadata.ShortURL
byt, err := json.Marshal(mjson)
if err != nil {
return err
}
if _, err := metaBackend.Put(filename, bytes.NewBuffer(byt)); err != nil {
return err
}
return nil
}
func metadataRead(filename string) (metadata Metadata, err error) {
b, err := metaBackend.Get(filename)
if err != nil {
// Metadata does not exist, generate one
newMData, err := generateMetadata(filename, neverExpire, "")
if err != nil {
return metadata, err
}
metadataWrite(filename, &newMData)
b, err = metaBackend.Get(filename)
if err != nil {
return metadata, BadMetadata
}
}
mjson := MetadataJSON{}
err = json.Unmarshal(b, &mjson)
if err != nil {
return metadata, BadMetadata
}
metadata.DeleteKey = mjson.DeleteKey
metadata.Mimetype = mjson.Mimetype
metadata.ArchiveFiles = mjson.ArchiveFiles
metadata.Sha256sum = mjson.Sha256sum
metadata.Expiry = time.Unix(mjson.Expiry, 0)
metadata.Size = mjson.Size
metadata.ShortURL = mjson.ShortURL
return
}
func printable(data []byte) bool {
for i, b := range data {
r := rune(b)
// A null terminator that's not at the beginning of the file
if r == 0 && i == 0 {
return false
} else if r == 0 && i < 0 {
continue
}
if r > unicode.MaxASCII {
return false
}
}
return true
}

46
pages.go

@ -21,8 +21,9 @@ const (
func indexHandler(c web.C, w http.ResponseWriter, r *http.Request) { func indexHandler(c web.C, w http.ResponseWriter, r *http.Request) {
err := renderTemplate(Templates["index.html"], pongo2.Context{ err := renderTemplate(Templates["index.html"], pongo2.Context{
"maxsize": Config.maxSize,
"expirylist": listExpirationTimes(),
"maxsize": Config.maxSize,
"expirylist": listExpirationTimes(),
"forcerandom": Config.forceRandomFilename,
}, r, w) }, r, w)
if err != nil { if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError) http.Error(w, err.Error(), http.StatusInternalServerError)
@ -31,7 +32,8 @@ func indexHandler(c web.C, w http.ResponseWriter, r *http.Request) {
func pasteHandler(c web.C, w http.ResponseWriter, r *http.Request) { func pasteHandler(c web.C, w http.ResponseWriter, r *http.Request) {
err := renderTemplate(Templates["paste.html"], pongo2.Context{ err := renderTemplate(Templates["paste.html"], pongo2.Context{
"expirylist": listExpirationTimes(),
"expirylist": listExpirationTimes(),
"forcerandom": Config.forceRandomFilename,
}, r, w) }, r, w)
if err != nil { if err != nil {
oopsHandler(c, w, r, RespHTML, "") oopsHandler(c, w, r, RespHTML, "")
@ -40,7 +42,8 @@ func pasteHandler(c web.C, w http.ResponseWriter, r *http.Request) {
func apiDocHandler(c web.C, w http.ResponseWriter, r *http.Request) { func apiDocHandler(c web.C, w http.ResponseWriter, r *http.Request) {
err := renderTemplate(Templates["API.html"], pongo2.Context{ err := renderTemplate(Templates["API.html"], pongo2.Context{
"siteurl": getSiteURL(r),
"siteurl": getSiteURL(r),
"forcerandom": Config.forceRandomFilename,
}, r, w) }, r, w)
if err != nil { if err != nil {
oopsHandler(c, w, r, RespHTML, "") oopsHandler(c, w, r, RespHTML, "")
@ -64,12 +67,10 @@ func oopsHandler(c web.C, w http.ResponseWriter, r *http.Request, rt RespType, m
w.WriteHeader(500) w.WriteHeader(500)
renderTemplate(Templates["oops.html"], pongo2.Context{"msg": msg}, r, w) renderTemplate(Templates["oops.html"], pongo2.Context{"msg": msg}, r, w)
return return
} else if rt == RespPLAIN { } else if rt == RespPLAIN {
w.WriteHeader(500) w.WriteHeader(500)
fmt.Fprintf(w, "%s", msg) fmt.Fprintf(w, "%s", msg)
return return
} else if rt == RespJSON { } else if rt == RespJSON {
js, _ := json.Marshal(map[string]string{ js, _ := json.Marshal(map[string]string{
"error": msg, "error": msg,
@ -79,7 +80,6 @@ func oopsHandler(c web.C, w http.ResponseWriter, r *http.Request, rt RespType, m
w.WriteHeader(500) w.WriteHeader(500)
w.Write(js) w.Write(js)
return return
} else if rt == RespAUTO { } else if rt == RespAUTO {
if strings.EqualFold("application/json", r.Header.Get("Accept")) { if strings.EqualFold("application/json", r.Header.Get("Accept")) {
oopsHandler(c, w, r, RespJSON, msg) oopsHandler(c, w, r, RespJSON, msg)
@ -89,11 +89,33 @@ func oopsHandler(c web.C, w http.ResponseWriter, r *http.Request, rt RespType, m
} }
} }
func badRequestHandler(c web.C, w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusBadRequest)
err := renderTemplate(Templates["400.html"], pongo2.Context{}, r, w)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
func badRequestHandler(c web.C, w http.ResponseWriter, r *http.Request, rt RespType, msg string) {
if rt == RespHTML {
w.WriteHeader(http.StatusBadRequest)
err := renderTemplate(Templates["400.html"], pongo2.Context{"msg": msg}, r, w)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
}
return
} else if rt == RespPLAIN {
w.WriteHeader(http.StatusBadRequest)
fmt.Fprintf(w, "%s", msg)
return
} else if rt == RespJSON {
js, _ := json.Marshal(map[string]string{
"error": msg,
})
w.Header().Set("Content-Type", "application/json; charset=UTF-8")
w.WriteHeader(http.StatusBadRequest)
w.Write(js)
return
} else if rt == RespAUTO {
if strings.EqualFold("application/json", r.Header.Get("Accept")) {
badRequestHandler(c, w, r, RespJSON, msg)
} else {
badRequestHandler(c, w, r, RespHTML, msg)
}
} }
} }
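The reworked badRequestHandler above now mirrors oopsHandler: it takes a RespType and a message, and RespAUTO answers with JSON or HTML depending on the Accept header. A minimal caller sketch, assuming the package's existing types from this diff (the handler name and message below are illustrative only):

// Sketch: how an upload-path handler might report a 400 now that
// badRequestHandler accepts a response type and a message.
func handleEmptyUpload(c web.C, w http.ResponseWriter, r *http.Request) {
	// RespAUTO returns JSON for "Accept: application/json", HTML otherwise.
	badRequestHandler(c, w, r, RespAUTO, "Empty file")
}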

67
server.go

@ -16,6 +16,7 @@ import (
"github.com/GeertJohan/go.rice" "github.com/GeertJohan/go.rice"
"github.com/andreimarcu/linx-server/backends" "github.com/andreimarcu/linx-server/backends"
"github.com/andreimarcu/linx-server/backends/localfs" "github.com/andreimarcu/linx-server/backends/localfs"
"github.com/andreimarcu/linx-server/backends/s3"
"github.com/flosch/pongo2" "github.com/flosch/pongo2"
"github.com/vharitonsky/iniflags" "github.com/vharitonsky/iniflags"
"github.com/zenazn/goji/graceful" "github.com/zenazn/goji/graceful"
@ -41,10 +42,13 @@ var Config struct {
siteName string siteName string
siteURL string siteURL string
sitePath string sitePath string
selifPath string
certFile string certFile string
keyFile string keyFile string
contentSecurityPolicy string contentSecurityPolicy string
fileContentSecurityPolicy string fileContentSecurityPolicy string
referrerPolicy string
fileReferrerPolicy string
xFrameOptions string xFrameOptions string
maxSize int64 maxSize int64
maxExpiry uint64 maxExpiry uint64
@ -56,7 +60,12 @@ var Config struct {
authFile string authFile string
remoteAuthFile string remoteAuthFile string
addHeaders headerList addHeaders headerList
googleShorterAPIKey string
noDirectAgents bool
s3Endpoint string
s3Region string
s3Bucket string
s3ForcePathStyle bool
forceRandomFilename bool
} }
var Templates = make(map[string]*pongo2.Template) var Templates = make(map[string]*pongo2.Template)
@ -65,8 +74,8 @@ var staticBox *rice.Box
var timeStarted time.Time var timeStarted time.Time
var timeStartedStr string var timeStartedStr string
var remoteAuthKeys []string var remoteAuthKeys []string
var metaBackend backends.StorageBackend
var fileBackend backends.StorageBackend
var metaStorageBackend backends.MetaStorageBackend
var storageBackend backends.StorageBackend
func setup() *web.Mux { func setup() *web.Mux {
mux := web.New() mux := web.New()
@ -85,8 +94,9 @@ func setup() *web.Mux {
mux.Use(middleware.Recoverer) mux.Use(middleware.Recoverer)
mux.Use(middleware.AutomaticOptions) mux.Use(middleware.AutomaticOptions)
mux.Use(ContentSecurityPolicy(CSPOptions{ mux.Use(ContentSecurityPolicy(CSPOptions{
policy: Config.contentSecurityPolicy,
frame: Config.xFrameOptions,
policy: Config.contentSecurityPolicy,
referrerPolicy: Config.referrerPolicy,
frame: Config.xFrameOptions,
})) }))
mux.Use(AddHeaders(Config.addHeaders)) mux.Use(AddHeaders(Config.addHeaders))
@ -124,8 +134,16 @@ func setup() *web.Mux {
Config.sitePath = "/" Config.sitePath = "/"
} }
metaBackend = localfs.NewLocalfsBackend(Config.metaDir)
fileBackend = localfs.NewLocalfsBackend(Config.filesDir)
Config.selifPath = strings.TrimLeft(Config.selifPath, "/")
if lastChar := Config.selifPath[len(Config.selifPath)-1:]; lastChar != "/" {
Config.selifPath = Config.selifPath + "/"
}
if Config.s3Bucket != "" {
storageBackend = s3.NewS3Backend(Config.s3Bucket, Config.s3Region, Config.s3Endpoint, Config.s3ForcePathStyle)
} else {
storageBackend = localfs.NewLocalfsBackend(Config.metaDir, Config.filesDir)
}
// Template setup // Template setup
p2l, err := NewPongo2TemplatesLoader() p2l, err := NewPongo2TemplatesLoader()
@ -144,10 +162,9 @@ func setup() *web.Mux {
// Routing setup // Routing setup
nameRe := regexp.MustCompile("^" + Config.sitePath + `(?P<name>[a-z0-9-\.]+)$`) nameRe := regexp.MustCompile("^" + Config.sitePath + `(?P<name>[a-z0-9-\.]+)$`)
selifRe := regexp.MustCompile("^" + Config.sitePath + `selif/(?P<name>[a-z0-9-\.]+)$`)
selifIndexRe := regexp.MustCompile("^" + Config.sitePath + `selif/$`)
selifRe := regexp.MustCompile("^" + Config.sitePath + Config.selifPath + `(?P<name>[a-z0-9-\.]+)$`)
selifIndexRe := regexp.MustCompile("^" + Config.sitePath + Config.selifPath + `$`)
torrentRe := regexp.MustCompile("^" + Config.sitePath + `(?P<name>[a-z0-9-\.]+)/torrent$`) torrentRe := regexp.MustCompile("^" + Config.sitePath + `(?P<name>[a-z0-9-\.]+)/torrent$`)
shortRe := regexp.MustCompile("^" + Config.sitePath + `(?P<name>[a-z0-9-\.]+)/short$`)
if Config.authFile == "" { if Config.authFile == "" {
mux.Get(Config.sitePath, indexHandler) mux.Get(Config.sitePath, indexHandler)
@ -186,10 +203,6 @@ func setup() *web.Mux {
mux.Get(selifIndexRe, unauthorizedHandler) mux.Get(selifIndexRe, unauthorizedHandler)
mux.Get(torrentRe, fileTorrentHandler) mux.Get(torrentRe, fileTorrentHandler)
if Config.googleShorterAPIKey != "" {
mux.Get(shortRe, shortURLHandler)
}
mux.NotFound(notFoundHandler) mux.NotFound(notFoundHandler)
return mux return mux
@ -210,6 +223,8 @@ func main() {
"name of the site") "name of the site")
flag.StringVar(&Config.siteURL, "siteurl", "", flag.StringVar(&Config.siteURL, "siteurl", "",
"site base url (including trailing slash)") "site base url (including trailing slash)")
flag.StringVar(&Config.selifPath, "selifpath", "selif",
"path relative to site base url where files are accessed directly")
flag.Int64Var(&Config.maxSize, "maxsize", 4*1024*1024*1024, flag.Int64Var(&Config.maxSize, "maxsize", 4*1024*1024*1024,
"maximum upload file size in bytes (default 4GB)") "maximum upload file size in bytes (default 4GB)")
flag.Uint64Var(&Config.maxExpiry, "maxexpiry", 0, flag.Uint64Var(&Config.maxExpiry, "maxexpiry", 0,
@ -229,17 +244,33 @@ func main() {
flag.StringVar(&Config.remoteAuthFile, "remoteauthfile", "", flag.StringVar(&Config.remoteAuthFile, "remoteauthfile", "",
"path to a file containing newline-separated scrypted auth keys for remote uploads") "path to a file containing newline-separated scrypted auth keys for remote uploads")
flag.StringVar(&Config.contentSecurityPolicy, "contentsecuritypolicy", flag.StringVar(&Config.contentSecurityPolicy, "contentsecuritypolicy",
"default-src 'self'; img-src 'self' data:; style-src 'self' 'unsafe-inline'; frame-ancestors 'self'; referrer origin;",
"default-src 'self'; img-src 'self' data:; style-src 'self' 'unsafe-inline'; frame-ancestors 'self';",
"value of default Content-Security-Policy header") "value of default Content-Security-Policy header")
flag.StringVar(&Config.fileContentSecurityPolicy, "filecontentsecuritypolicy", flag.StringVar(&Config.fileContentSecurityPolicy, "filecontentsecuritypolicy",
"default-src 'none'; img-src 'self'; object-src 'self'; media-src 'self'; style-src 'self' 'unsafe-inline'; frame-ancestors 'self'; referrer origin;",
"default-src 'none'; img-src 'self'; object-src 'self'; media-src 'self'; style-src 'self' 'unsafe-inline'; frame-ancestors 'self';",
"value of Content-Security-Policy header for file access") "value of Content-Security-Policy header for file access")
flag.StringVar(&Config.referrerPolicy, "referrerpolicy",
"same-origin",
"value of default Referrer-Policy header")
flag.StringVar(&Config.fileReferrerPolicy, "filereferrerpolicy",
"same-origin",
"value of Referrer-Policy header for file access")
flag.StringVar(&Config.xFrameOptions, "xframeoptions", "SAMEORIGIN", flag.StringVar(&Config.xFrameOptions, "xframeoptions", "SAMEORIGIN",
"value of X-Frame-Options header") "value of X-Frame-Options header")
flag.Var(&Config.addHeaders, "addheader", flag.Var(&Config.addHeaders, "addheader",
"Add an arbitrary header to the response. This option can be used multiple times.") "Add an arbitrary header to the response. This option can be used multiple times.")
flag.StringVar(&Config.googleShorterAPIKey, "googleapikey", "",
"API Key for Google's URL Shortener.")
flag.BoolVar(&Config.noDirectAgents, "nodirectagents", false,
"disable serving files directly for wget/curl user agents")
flag.StringVar(&Config.s3Endpoint, "s3-endpoint", "",
"S3 endpoint")
flag.StringVar(&Config.s3Region, "s3-region", "",
"S3 region")
flag.StringVar(&Config.s3Bucket, "s3-bucket", "",
"S3 bucket to use for files and metadata")
flag.BoolVar(&Config.s3ForcePathStyle, "s3-force-path-style", false,
"Force path-style addressing for S3 (e.g. https://s3.amazonaws.com/linx/example.txt)")
flag.BoolVar(&Config.forceRandomFilename, "force-random-filename", false,
"Force all uploads to use a random filename")
iniflags.Parse() iniflags.Parse()
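Everything above is wired through plain flags parsed by iniflags, so switching to the S3 backend is purely a configuration change: when -s3-bucket is non-empty the S3 backend is used, otherwise localfs with the metadata and files directories. A rough invocation sketch using the flags added in this hunk (bucket, region, and bind address are placeholders):

linx-server -bind=127.0.0.1:8080 -s3-bucket=my-linx-bucket -s3-region=us-east-1 -s3-force-path-style -selifpath=selif -force-random-filename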

206
server_test.go

@ -173,7 +173,7 @@ func TestFileNotFound(t *testing.T) {
filename := generateBarename() filename := generateBarename()
req, err := http.NewRequest("GET", "/selif/"+filename, nil)
req, err := http.NewRequest("GET", "/"+Config.selifPath+filename, nil)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
@ -486,7 +486,6 @@ func TestPostJSONUploadMaxExpiry(t *testing.T) {
var myjson RespOkJSON var myjson RespOkJSON
err = json.Unmarshal([]byte(w.Body.String()), &myjson) err = json.Unmarshal([]byte(w.Body.String()), &myjson)
if err != nil { if err != nil {
fmt.Println(w.Body.String())
t.Fatal(err) t.Fatal(err)
} }
@ -643,14 +642,45 @@ func TestPostEmptyUpload(t *testing.T) {
mux.ServeHTTP(w, req) mux.ServeHTTP(w, req)
if w.Code != 500 {
if w.Code != 400 {
t.Log(w.Body.String()) t.Log(w.Body.String())
t.Fatalf("Status code is not 500, but %d", w.Code)
t.Fatalf("Status code is not 400, but %d", w.Code)
}
}
func TestPostTooLargeUpload(t *testing.T) {
mux := setup()
oldMaxSize := Config.maxSize
Config.maxSize = 2
w := httptest.NewRecorder()
filename := generateBarename() + ".txt"
var b bytes.Buffer
mw := multipart.NewWriter(&b)
fw, err := mw.CreateFormFile("file", filename)
if err != nil {
t.Fatal(err)
}
fw.Write([]byte("test content"))
mw.Close()
req, err := http.NewRequest("POST", "/upload/", &b)
req.Header.Set("Content-Type", mw.FormDataContentType())
req.Header.Set("Referer", Config.siteURL)
if err != nil {
t.Fatal(err)
} }
if !strings.Contains(w.Body.String(), "Empty file") {
t.Fatal("Response did not contain 'Empty file'")
mux.ServeHTTP(w, req)
if w.Code != 400 {
t.Log(w.Body.String())
t.Fatalf("Status code is not 400, but %d", w.Code)
} }
Config.maxSize = oldMaxSize
} }
func TestPostEmptyJSONUpload(t *testing.T) { func TestPostEmptyJSONUpload(t *testing.T) {
@ -679,9 +709,9 @@ func TestPostEmptyJSONUpload(t *testing.T) {
mux.ServeHTTP(w, req) mux.ServeHTTP(w, req)
if w.Code != 500 {
if w.Code != 400 {
t.Log(w.Body.String()) t.Log(w.Body.String())
t.Fatalf("Status code is not 500, but %d", w.Code)
t.Fatalf("Status code is not 400, but %d", w.Code)
} }
var myjson RespErrJSON var myjson RespErrJSON
@ -690,7 +720,7 @@ func TestPostEmptyJSONUpload(t *testing.T) {
t.Fatal(err) t.Fatal(err)
} }
if myjson.Error != "Could not upload file: Empty file" {
if myjson.Error != "Empty file" {
t.Fatal("Json 'error' was not 'Empty file' but " + myjson.Error) t.Fatal("Json 'error' was not 'Empty file' but " + myjson.Error)
} }
} }
@ -733,6 +763,32 @@ func TestPutRandomizedUpload(t *testing.T) {
} }
} }
func TestPutForceRandomUpload(t *testing.T) {
mux := setup()
w := httptest.NewRecorder()
oldFRF := Config.forceRandomFilename
Config.forceRandomFilename = true
filename := "randomizeme.file"
req, err := http.NewRequest("PUT", "/upload/"+filename, strings.NewReader("File content"))
if err != nil {
t.Fatal(err)
}
// while this should also work without this header, let's try to force
// the randomized filename off to be sure
req.Header.Set("Linx-Randomize", "no")
mux.ServeHTTP(w, req)
if w.Body.String() == Config.siteURL+filename {
t.Fatal("Filename was not random")
}
Config.forceRandomFilename = oldFRF
}
func TestPutNoExtensionUpload(t *testing.T) { func TestPutNoExtensionUpload(t *testing.T) {
mux := setup() mux := setup()
w := httptest.NewRecorder() w := httptest.NewRecorder()
@ -768,11 +824,41 @@ func TestPutEmptyUpload(t *testing.T) {
mux.ServeHTTP(w, req) mux.ServeHTTP(w, req)
if !strings.Contains(w.Body.String(), "Empty file") {
t.Fatal("Response doesn't contain'Empty file'")
if w.Code != 400 {
t.Fatalf("Status code is not 400, but %d", w.Code)
} }
} }
func TestPutTooLargeUpload(t *testing.T) {
mux := setup()
oldMaxSize := Config.maxSize
Config.maxSize = 2
w := httptest.NewRecorder()
filename := generateBarename() + ".file"
req, err := http.NewRequest("PUT", "/upload/"+filename, strings.NewReader("File too big"))
if err != nil {
t.Fatal(err)
}
req.Header.Set("Linx-Randomize", "yes")
mux.ServeHTTP(w, req)
if w.Code != 500 {
t.Log(w.Body.String())
t.Fatalf("Status code is not 500, but %d", w.Code)
}
if !strings.Contains(w.Body.String(), "request body too large") {
t.Fatal("Response did not contain 'request body too large'")
}
Config.maxSize = oldMaxSize
}
func TestPutJSONUpload(t *testing.T) { func TestPutJSONUpload(t *testing.T) {
var myjson RespOkJSON var myjson RespOkJSON
@ -941,7 +1027,7 @@ func TestPutAndOverwrite(t *testing.T) {
// Make sure it's the new file // Make sure it's the new file
w = httptest.NewRecorder() w = httptest.NewRecorder()
req, err = http.NewRequest("GET", "/selif/"+myjson.Filename, nil)
req, err = http.NewRequest("GET", "/"+Config.selifPath+myjson.Filename, nil)
mux.ServeHTTP(w, req) mux.ServeHTTP(w, req)
if w.Code == 404 { if w.Code == 404 {
@ -953,6 +1039,55 @@ func TestPutAndOverwrite(t *testing.T) {
} }
} }
func TestPutAndOverwriteForceRandom(t *testing.T) {
var myjson RespOkJSON
mux := setup()
w := httptest.NewRecorder()
oldFRF := Config.forceRandomFilename
Config.forceRandomFilename = true
req, err := http.NewRequest("PUT", "/upload", strings.NewReader("File content"))
if err != nil {
t.Fatal(err)
}
req.Header.Set("Accept", "application/json")
mux.ServeHTTP(w, req)
err = json.Unmarshal([]byte(w.Body.String()), &myjson)
if err != nil {
t.Fatal(err)
}
// Overwrite it
w = httptest.NewRecorder()
req, err = http.NewRequest("PUT", "/upload/"+myjson.Filename, strings.NewReader("New file content"))
req.Header.Set("Linx-Delete-Key", myjson.Delete_Key)
mux.ServeHTTP(w, req)
if w.Code != 200 {
t.Fatal("Status code was not 200, but " + strconv.Itoa(w.Code))
}
// Make sure it's the new file
w = httptest.NewRecorder()
req, err = http.NewRequest("GET", "/"+Config.selifPath+myjson.Filename, nil)
mux.ServeHTTP(w, req)
if w.Code == 404 {
t.Fatal("Status code was 404")
}
if w.Body.String() != "New file content" {
t.Fatal("File did not contain 'New file content")
}
Config.forceRandomFilename = oldFRF
}
func TestPutAndSpecificDelete(t *testing.T) { func TestPutAndSpecificDelete(t *testing.T) {
var myjson RespOkJSON var myjson RespOkJSON
@ -1121,3 +1256,50 @@ func TestShutdown(t *testing.T) {
os.RemoveAll(Config.filesDir) os.RemoveAll(Config.filesDir)
os.RemoveAll(Config.metaDir) os.RemoveAll(Config.metaDir)
} }
func TestPutAndGetCLI(t *testing.T) {
var myjson RespOkJSON
mux := setup()
// upload file
w := httptest.NewRecorder()
req, err := http.NewRequest("PUT", "/upload", strings.NewReader("File content"))
if err != nil {
t.Fatal(err)
}
req.Header.Set("Accept", "application/json")
mux.ServeHTTP(w, req)
err = json.Unmarshal([]byte(w.Body.String()), &myjson)
if err != nil {
t.Fatal(err)
}
// request file without wget user agent
w = httptest.NewRecorder()
req, err = http.NewRequest("GET", myjson.Url, nil)
if err != nil {
t.Fatal(err)
}
mux.ServeHTTP(w, req)
contentType := w.Header().Get("Content-Type")
if strings.HasPrefix(contentType, "text/plain") {
t.Fatalf("Didn't receive file display page but %s", contentType)
}
// request file with wget user agent
w = httptest.NewRecorder()
req, err = http.NewRequest("GET", myjson.Url, nil)
req.Header.Set("User-Agent", "wget")
if err != nil {
t.Fatal(err)
}
mux.ServeHTTP(w, req)
contentType = w.Header().Get("Content-Type")
if !strings.HasPrefix(contentType, "text/plain") {
t.Fatalf("Didn't receive file directly but %s", contentType)
}
}
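TestPutAndGetCLI above pins down the default behavior that the new -nodirectagents flag can disable: a request whose User-Agent looks like a command-line downloader (the test uses "wget") receives the raw file as text/plain, while other requests get the HTML display page. A small client-side sketch in Go (the URL is a placeholder):

package main

import (
	"fmt"
	"io"
	"net/http"
	"os"
)

func main() {
	// Claim to be a CLI downloader; per the test above, the server should then
	// return the file contents directly rather than the display page.
	req, err := http.NewRequest("GET", "https://example.com/myfile.txt", nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("User-Agent", "wget")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	fmt.Println("Content-Type:", resp.Header.Get("Content-Type"))
	io.Copy(os.Stdout, resp.Body)
}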

89
shorturl.go

@ -1,89 +0,0 @@
package main
import (
"bytes"
"encoding/json"
"errors"
"net/http"
"github.com/zenazn/goji/web"
)
type shortenerRequest struct {
LongURL string `json:"longUrl"`
}
type shortenerResponse struct {
Kind string `json:"kind"`
ID string `json:"id"`
LongURL string `json:"longUrl"`
Error struct {
Code int `json:"code"`
Message string `json:"message"`
} `json:"error"`
}
func shortURLHandler(c web.C, w http.ResponseWriter, r *http.Request) {
fileName := c.URLParams["name"]
err := checkFile(fileName)
if err == NotFoundErr {
notFoundHandler(c, w, r)
return
}
metadata, err := metadataRead(fileName)
if err != nil {
oopsHandler(c, w, r, RespJSON, "Corrupt metadata.")
return
}
if metadata.ShortURL == "" {
url, err := shortenURL(getSiteURL(r) + fileName)
if err != nil {
oopsHandler(c, w, r, RespJSON, err.Error())
return
}
metadata.ShortURL = url
err = metadataWrite(fileName, &metadata)
if err != nil {
oopsHandler(c, w, r, RespJSON, "Corrupt metadata.")
return
}
}
js, _ := json.Marshal(map[string]string{
"shortUrl": metadata.ShortURL,
})
w.Write(js)
return
}
func shortenURL(url string) (string, error) {
apiURL := "https://www.googleapis.com/urlshortener/v1/url?key=" + Config.googleShorterAPIKey
jsonStr, _ := json.Marshal(shortenerRequest{LongURL: url})
req, err := http.NewRequest("POST", apiURL, bytes.NewBuffer(jsonStr))
req.Header.Set("Content-Type", "application/json")
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
return "", err
}
defer resp.Body.Close()
shortenerResponse := new(shortenerResponse)
err = json.NewDecoder(resp.Body).Decode(shortenerResponse)
if err != nil {
return "", err
}
if shortenerResponse.Error.Message != "" {
return "", errors.New(shortenerResponse.Error.Message)
}
return shortenerResponse.ID, nil
}

10
static/css/dropzone.css

@ -31,17 +31,25 @@
border: 2px solid #FAFBFC; border: 2px solid #FAFBFC;
} }
#dropzone { width: 400px;
#dropzone {
width: 400px;
margin-left: auto; margin-left: auto;
margin-right: auto; margin-right: auto;
} }
@media(max-width: 450px) {
#dropzone {
width: auto;
}
}
#uploads { #uploads {
margin-top: 20px; margin-top: 20px;
} }
div.dz-default { div.dz-default {
border: 2px dashed #C9C9C9; border: 2px dashed #C9C9C9;
border-radius: 5px;
color: #C9C9C9; color: #C9C9C9;
font: 14px "helvetica neue",helvetica,arial,sans-serif; font: 14px "helvetica neue",helvetica,arial,sans-serif;
background-color: #FAFBFC; background-color: #FAFBFC;

3
static/css/github-markdown.css

@ -8,7 +8,8 @@
font-size: 12px; font-size: 12px;
line-height: 1.6; line-height: 1.6;
word-wrap: break-word; word-wrap: break-word;
width: 680px;
width: 80vw;
max-width: 680px;
padding: 10px; padding: 10px;
} }

205
static/css/linx.css

@ -1,56 +1,56 @@
body { body {
background-color: #E8ECF0;
color: #556A7F;
background-color: #E8ECF0;
color: #556A7F;
font-family: Arial, Helvetica, sans-serif;
font-size: 14px;
font-family: Arial, Helvetica, sans-serif;
font-size: 14px;
} }
#container_container { #container_container {
display: table;
table-layout: fixed;
margin-left: auto;
margin-right: auto;
display: table;
table-layout: fixed;
margin-left: auto;
margin-right: auto;
} }
#container { #container {
display: table-cell;
min-width: 200px;
display: table-cell;
min-width: 200px;
} }
#header a { #header a {
text-decoration: none;
color: #556A7F;
text-decoration: none;
color: #556A7F;
} }
#navigation { #navigation {
margin-top: 4px;
margin-top: 4px;
} }
#navigation a { #navigation a {
text-decoration: none;
border-bottom: 1px dotted #556A7F;
color: #556A7F;
text-decoration: none;
border-bottom: 1px dotted #556A7F;
color: #556A7F;
} }
#navigation a:hover { #navigation a:hover {
background-color: #C7D1EB;
background-color: #C7D1EB;
} }
#main { #main {
background-color: white;
background-color: white;
padding: 6px 5px 8px 5px;
padding: 6px 5px 8px 5px;
-moz-box-shadow: 1px 1px 1px 1px #ccc;
-webkit-box-shadow: 1px 1px 1px 1px #ccc;
box-shadow: 1px 1px 1px 1px #ccc;
-moz-box-shadow: 1px 1px 1px 1px #ccc;
-webkit-box-shadow: 1px 1px 1px 1px #ccc;
box-shadow: 1px 1px 1px 1px #ccc;
text-align: center;
text-align: center;
} }
#main a { #main a {
color: #556A7F;
color: #556A7F;
} }
#normal-content { #normal-content {
@ -62,28 +62,29 @@ body {
margin-bottom: 0; margin-bottom: 0;
} }
.ninfo {
margin-bottom: 5px;
}
.dinfo { .dinfo {
-moz-box-shadow: 1px 1px 1px 1px #ccc; -moz-box-shadow: 1px 1px 1px 1px #ccc;
-webkit-box-shadow: 1px 1px 1px 1px #ccc; -webkit-box-shadow: 1px 1px 1px 1px #ccc;
box-shadow: 1px 1px 1px 1px #ccc; box-shadow: 1px 1px 1px 1px #ccc;
margin-bottom: 15px; margin-bottom: 15px;
} }
#info { #info {
text-align: left;
background-color: white; background-color: white;
padding: 5px 5px 5px 5px;
padding: 5px;
} }
#info #filename,
#editform #filename {
width: 232px;
.info-flex {
display: flex;
flex-wrap: wrap;
align-items: baseline;
justify-content: space-between;
}
.info-actions {
margin-left: 15px;
font-size: 13px;
text-align: right;
} }
#info #extension, #info #extension,
@ -91,15 +92,6 @@ body {
width: 40px; width: 40px;
} }
#info .float-left {
margin-top: 2px;
margin-right: 20px;
}
#info .right {
font-size: 13px;
}
#info a { #info a {
text-decoration: none; text-decoration: none;
color: #556A7F; color: #556A7F;
@ -110,88 +102,97 @@ body {
background-color: #E8ECF0; background-color: #E8ECF0;
} }
#info input[type=text] {
border: 0;
color: #556A7F;
#info input[type=checkbox] {
margin: 0;
vertical-align: bottom;
} }
#footer { #footer {
color: gray;
text-align: right;
margin-top: 30px;
margin-bottom: 10px;
font-size: 11px;
color: gray;
text-align: right;
margin-top: 30px;
margin-bottom: 10px;
font-size: 11px;
} }
#footer a { #footer a {
color: gray;
text-decoration: none;
color: gray;
text-decoration: none;
} }
.normal { .normal {
text-align: left;
font-size: 13px;
text-align: left;
font-size: 13px;
} }
.normal a { .normal a {
text-decoration: none;
border-bottom: 1px dotted gray;
text-decoration: none;
border-bottom: 1px dotted gray;
} }
.normal a:hover { .normal a:hover {
color: black;
background-color: #E8ECF0;
color: black;
background-color: #E8ECF0;
} }
.normal ul { .normal ul {
padding-left: 15px;
padding-left: 15px;
} }
.normal li { .normal li {
margin-bottom: 3px;
list-style: none;
margin-bottom: 3px;
list-style: none;
} }
.normal li a { .normal li a {
font-weight: bold;
font-weight: bold;
} }
.fixed { .fixed {
width: 800px;
width: 80vw;
max-width: 800px;
}
.paste {
width: 70vw;
max-width: 700px;
} }
.needs-border { .needs-border {
border-top: 1px solid rgb(214, 214, 214);
border-top: 1px solid rgb(214, 214, 214);
} }
.left { .left {
text-align: left;
text-align: left;
} }
.float-left { .float-left {
float: left;
float: left;
}
.pad-left {
padding-left: 10px;
} }
.pad-right { .pad-right {
padding-right: 10px;
padding-right: 10px;
} }
.text-right { .text-right {
text-align: right;
text-align: right;
} }
.center { .center {
text-align: center;
text-align: center;
} }
.float-right, .right { .float-right, .right {
float: right;
float: right;
} }
.clear { .clear {
clear: both;
clear: both;
} }
#upload_header { #upload_header {
@ -245,19 +246,24 @@ body {
} }
#choices { #choices {
float: left;
display: flex;
align-items: center;
flex-wrap: wrap;
justify-content: space-between;
width: 100%; width: 100%;
text-align: left;
vertical-align: bottom;
margin-top: 5px; margin-top: 5px;
font-size:13px;
font-size: 13px;
} }
#expiry { #expiry {
float: right;
padding-top: 1px; padding-top: 1px;
} }
#randomize {
vertical-align: bottom;
margin: 0;
}
.oopscontent { .oopscontent {
width: 400px; width: 400px;
} }
@ -267,13 +273,35 @@ body {
border: 0; border: 0;
} }
.error-404 img {
max-width: 90vw;
}
.padme {
padding-left: 5px;
padding-right: 5px;
}
.editor { .editor {
width: 705px;
height: 450px;
border-color: #cccccc;
font-family: monospace;
resize: none;
overflow: auto;
width: 100%;
height: 450px;
border: 1px solid #eaeaea;
font-family: monospace;
resize: none;
overflow: auto;
border-radius: 2px;
padding: 2px;
box-sizing: border-box;
-webkit-box-sizing: border-box;
-moz-box-sizing: border-box;
}
#info input[type=text] {
border: 1px solid #eaeaea;
color: #556A7F;
padding: 2px 4px;
font-family: Arial, Helvetica, sans-serif;
} }
.storygreen { .storygreen {
@ -297,7 +325,7 @@ body {
.display-audio, .display-audio,
.display-file { .display-file {
width: 500px;
width: 100%;
} }
.display-image { .display-image {
@ -325,15 +353,16 @@ body {
#editform, #editform,
#editform .editor { #editform .editor {
display: none; display: none;
width: 100%
} }
#codeb { #codeb {
white-space: pre-wrap; white-space: pre-wrap;
} }
#editor {
#inplace-editor {
display: none; display: none;
width: 794px;
width: 100%;
height: 800px; height: 800px;
font-size: 13px; font-size: 13px;
} }

6
static/js/bin.js

@ -1,6 +1,6 @@
// @license magnet:?xt=urn:btih:1f739d935676111cfff4b4693e3816e664797050&dn=gpl-3.0.txt GPL-v3-or-Later // @license magnet:?xt=urn:btih:1f739d935676111cfff4b4693e3816e664797050&dn=gpl-3.0.txt GPL-v3-or-Later
var navlist = document.getElementById("info").getElementsByClassName("right")[0];
var navlist = document.getElementById("info").getElementsByClassName("info-actions")[0];
init(); init();
@ -32,13 +32,13 @@ function edit(ev) {
var normalcontent = document.getElementById("normal-content"); var normalcontent = document.getElementById("normal-content");
normalcontent.removeChild(document.getElementById("normal-code")); normalcontent.removeChild(document.getElementById("normal-code"));
var editordiv = document.getElementById("editor");
var editordiv = document.getElementById("inplace-editor");
editordiv.style.display = "block"; editordiv.style.display = "block";
editordiv.addEventListener('keydown', handleTab); editordiv.addEventListener('keydown', handleTab);
} }
function paste(ev) { function paste(ev) {
var editordiv = document.getElementById("editor");
var editordiv = document.getElementById("inplace-editor");
document.getElementById("newcontent").value = editordiv.value; document.getElementById("newcontent").value = editordiv.value;
document.forms["reply"].submit(); document.forms["reply"].submit();
} }

39
static/js/shorturl.js

@ -1,39 +0,0 @@
document.getElementById('shorturl').addEventListener('click', function (e) {
e.preventDefault();
if (e.target.href !== "") return;
xhr = new XMLHttpRequest();
xhr.open("GET", e.target.dataset.url, true);
xhr.setRequestHeader('Accept', 'application/json');
xhr.onreadystatechange = function () {
if (xhr.readyState === 4) {
var resp = JSON.parse(xhr.responseText);
if (xhr.status === 200 && resp.error == null) {
e.target.innerText = resp.shortUrl;
e.target.href = resp.shortUrl;
e.target.setAttribute('aria-label', 'Click to copy into clipboard')
} else {
e.target.setAttribute('aria-label', resp.error)
}
}
};
xhr.send();
});
var clipboard = new Clipboard("#shorturl", {
text: function (trigger) {
if (trigger.href == null) return;
return trigger.href;
}
});
clipboard.on('success', function (e) {
e.trigger.setAttribute('aria-label', 'Successfully copied')
});
clipboard.on('error', function (e) {
e.trigger.setAttribute('aria-label', 'Your browser does not support coping to clipboard')
});

171
static/js/upload.js

@ -1,51 +1,54 @@
// @license magnet:?xt=urn:btih:1f739d935676111cfff4b4693e3816e664797050&dn=gpl-3.0.txt GPL-v3-or-Later // @license magnet:?xt=urn:btih:1f739d935676111cfff4b4693e3816e664797050&dn=gpl-3.0.txt GPL-v3-or-Later
Dropzone.options.dropzone = { Dropzone.options.dropzone = {
init: function() {
var dzone = document.getElementById("dzone");
dzone.style.display = "block";
},
addedfile: function(file) {
var upload = document.createElement("div");
upload.className = "upload";
init: function() {
var dzone = document.getElementById("dzone");
dzone.style.display = "block";
},
addedfile: function(file) {
var upload = document.createElement("div");
upload.className = "upload";
var fileLabel = document.createElement("span");
fileLabel.innerHTML = file.name;
file.fileLabel = fileLabel;
upload.appendChild(fileLabel);
var fileLabel = document.createElement("span");
fileLabel.innerHTML = file.name;
file.fileLabel = fileLabel;
upload.appendChild(fileLabel);
var fileActions = document.createElement("div");
fileActions.className = "right";
file.fileActions = fileActions;
upload.appendChild(fileActions);
var fileActions = document.createElement("div");
fileActions.className = "right";
file.fileActions = fileActions;
upload.appendChild(fileActions);
var cancelAction = document.createElement("span");
cancelAction.className = "cancel";
cancelAction.innerHTML = "Cancel";
cancelAction.addEventListener('click', function(ev) {
this.removeFile(file);
}.bind(this));
file.cancelActionElement = cancelAction;
fileActions.appendChild(cancelAction);
var cancelAction = document.createElement("span");
cancelAction.className = "cancel";
cancelAction.innerHTML = "Cancel";
cancelAction.addEventListener('click', function(ev) {
this.removeFile(file);
}.bind(this));
file.cancelActionElement = cancelAction;
fileActions.appendChild(cancelAction);
var progress = document.createElement("span");
file.progressElement = progress;
fileActions.appendChild(progress);
var progress = document.createElement("span");
file.progressElement = progress;
fileActions.appendChild(progress);
file.uploadElement = upload;
file.uploadElement = upload;
document.getElementById("uploads").appendChild(upload);
},
uploadprogress: function(file, p, bytesSent) {
p = parseInt(p);
file.progressElement.innerHTML = p + "%";
file.uploadElement.setAttribute("style", 'background-image: -webkit-linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%); background-image: -moz-linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%); background-image: -ms-linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%); background-image: -o-linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%); background-image: linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%)');
},
sending: function(file, xhr, formData) {
formData.append("randomize", document.getElementById("randomize").checked);
formData.append("expires", document.getElementById("expires").value);
},
success: function(file, resp) {
document.getElementById("uploads").appendChild(upload);
},
uploadprogress: function(file, p, bytesSent) {
p = parseInt(p);
file.progressElement.innerHTML = p + "%";
file.uploadElement.setAttribute("style", 'background-image: -webkit-linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%); background-image: -moz-linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%); background-image: -ms-linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%); background-image: -o-linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%); background-image: linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%)');
},
sending: function(file, xhr, formData) {
var randomize = document.getElementById("randomize");
if(randomize != null) {
formData.append("randomize", randomize.checked);
}
formData.append("expires", document.getElementById("expires").value);
},
success: function(file, resp) {
file.fileActions.removeChild(file.progressElement); file.fileActions.removeChild(file.progressElement);
var fileLabelLink = document.createElement("a"); var fileLabelLink = document.createElement("a");
@ -59,51 +62,61 @@ Dropzone.options.dropzone = {
var deleteAction = document.createElement("span"); var deleteAction = document.createElement("span");
deleteAction.innerHTML = "Delete"; deleteAction.innerHTML = "Delete";
deleteAction.className = "cancel"; deleteAction.className = "cancel";
deleteAction.addEventListener('click', function(ev) {
xhr = new XMLHttpRequest();
xhr.open("DELETE", resp.url, true);
xhr.setRequestHeader("Linx-Delete-Key", resp.delete_key);
xhr.onreadystatechange = function(file) {
if (xhr.readyState == 4 && xhr.status === 200) {
var text = document.createTextNode("Deleted ");
file.fileLabel.insertBefore(text, file.fileLabelLink);
file.fileLabel.className = "deleted";
file.fileActions.removeChild(file.cancelActionElement);
}
}.bind(this, file);
xhr.send();
});
file.fileActions.removeChild(file.cancelActionElement);
file.cancelActionElement = deleteAction;
file.fileActions.appendChild(deleteAction);
},
error: function(file, resp, xhrO) {
deleteAction.addEventListener('click', function(ev) {
xhr = new XMLHttpRequest();
xhr.open("DELETE", resp.url, true);
xhr.setRequestHeader("Linx-Delete-Key", resp.delete_key);
xhr.onreadystatechange = function(file) {
if (xhr.readyState == 4 && xhr.status === 200) {
var text = document.createTextNode("Deleted ");
file.fileLabel.insertBefore(text, file.fileLabelLink);
file.fileLabel.className = "deleted";
file.fileActions.removeChild(file.cancelActionElement);
}
}.bind(this, file);
xhr.send();
});
file.fileActions.removeChild(file.cancelActionElement);
file.cancelActionElement = deleteAction;
file.fileActions.appendChild(deleteAction);
},
error: function(file, resp, xhrO) {
file.fileActions.removeChild(file.cancelActionElement); file.fileActions.removeChild(file.cancelActionElement);
file.fileActions.removeChild(file.progressElement); file.fileActions.removeChild(file.progressElement);
if (file.status === "canceled") {
file.fileLabel.innerHTML = file.name + ": Canceled ";
}
else {
if (resp.error) {
file.fileLabel.innerHTML = file.name + ": " + resp.error;
}
else if (resp.includes("<html")) {
file.fileLabel.innerHTML = file.name + ": Server Error";
}
else {
file.fileLabel.innerHTML = file.name + ": " + resp;
}
}
file.fileLabel.className = "error";
},
if (file.status === "canceled") {
file.fileLabel.innerHTML = file.name + ": Canceled ";
}
else {
if (resp.error) {
file.fileLabel.innerHTML = file.name + ": " + resp.error;
}
else if (resp.includes("<html")) {
file.fileLabel.innerHTML = file.name + ": Server Error";
}
else {
file.fileLabel.innerHTML = file.name + ": " + resp;
}
}
file.fileLabel.className = "error";
},
maxFilesize: Math.round(parseInt(document.getElementById("dropzone").getAttribute("data-maxsize"), 10) / 1024 / 1024), maxFilesize: Math.round(parseInt(document.getElementById("dropzone").getAttribute("data-maxsize"), 10) / 1024 / 1024),
previewsContainer: "#uploads",
parallelUploads: 5,
headers: {"Accept": "application/json"},
dictDefaultMessage: "Click or Drop file(s)",
dictFallbackMessage: ""
previewsContainer: "#uploads",
parallelUploads: 5,
headers: {"Accept": "application/json"},
dictDefaultMessage: "Click or Drop file(s) or Paste image",
dictFallbackMessage: ""
};
document.onpaste = function(event) {
var items = (event.clipboardData || event.originalEvent.clipboardData).items;
for (index in items) {
var item = items[index];
if (item.kind === "file") {
Dropzone.forElement("#dropzone").addFile(item.getAsFile());
}
}
}; };
// @end-license // @end-license

1
templates.go

@ -83,6 +83,7 @@ func renderTemplate(tpl *pongo2.Template, context pongo2.Context, r *http.Reques
} }
context["sitepath"] = Config.sitePath context["sitepath"] = Config.sitePath
context["selifpath"] = Config.selifPath
context["using_auth"] = Config.authFile != "" context["using_auth"] = Config.authFile != ""
return tpl.ExecuteWriter(context, writer) return tpl.ExecuteWriter(context, writer)

4
templates/404.html

@ -1,5 +1,7 @@
{% extends "base.html" %} {% extends "base.html" %}
{% block content %} {% block content %}
<a href="{{ sitepath }}"><img src='{{ sitepath }}static/images/404.jpg'></a>
<div class="error-404">
<a href="{{ sitepath }}"><img src='{{ sitepath }}static/images/404.jpg'></a>
</div>
{% endblock %} {% endblock %}

16
templates/API.html

@ -25,8 +25,10 @@
<p><strong>Optional headers with the request</strong></p> <p><strong>Optional headers with the request</strong></p>
{% if not forcerandom %}
<p>Randomize the filename<br/> <p>Randomize the filename<br/>
<code>Linx-Randomize: yes</code></p> <code>Linx-Randomize: yes</code></p>
{% endif %}
<p>Specify a custom deletion key<br/> <p>Specify a custom deletion key<br/>
<code>Linx-Delete-Key: mysecret</code></p> <code>Linx-Delete-Key: mysecret</code></p>
@ -41,6 +43,7 @@
<blockquote> <blockquote>
<p>“url”: the publicly available upload url<br/> <p>“url”: the publicly available upload url<br/>
“direct_url”: the url to access the file directly<br/>
“filename”: the (optionally generated) filename<br/> “filename”: the (optionally generated) filename<br/>
“delete_key”: the (optionally generated) deletion key,<br/> “delete_key”: the (optionally generated) deletion key,<br/>
“expiry”: the unix timestamp at which the file will expire (0 if never)<br/> “expiry”: the unix timestamp at which the file will expire (0 if never)<br/>
@ -55,30 +58,30 @@
{% if using_auth %} {% if using_auth %}
<pre><code>$ curl -H &#34;Linx-Api-Key: mysecretkey&#34; -T myphoto.jpg {{ siteurl }}upload/ <pre><code>$ curl -H &#34;Linx-Api-Key: mysecretkey&#34; -T myphoto.jpg {{ siteurl }}upload/
{{ siteurl }}myphoto.jpg</code></pre>
{{ siteurl }}{% if not forcerandom %}myphoto.jpg{% else %}7z4h4ut.jpg{% endif %}</code></pre>
{% else %} {% else %}
<pre><code>$ curl -T myphoto.jpg {{ siteurl }}upload/ <pre><code>$ curl -T myphoto.jpg {{ siteurl }}upload/
{{ siteurl }}myphoto.jpg</code></pre>
{{ siteurl }}{% if not forcerandom %}myphoto.jpg{% else %}wtq7pan.jpg{% endif %}</code></pre>
{% endif %} {% endif %}
<p>Uploading myphoto.jpg with an expiry of 20 minutes</p> <p>Uploading myphoto.jpg with an expiry of 20 minutes</p>
{% if using_auth %} {% if using_auth %}
<pre><code>$ curl -H &#34;Linx-Api-Key: mysecretkey&#34; -H &#34;Linx-Expiry: 1200&#34; -T myphoto.jpg {{ siteurl }}upload/ <pre><code>$ curl -H &#34;Linx-Api-Key: mysecretkey&#34; -H &#34;Linx-Expiry: 1200&#34; -T myphoto.jpg {{ siteurl }}upload/
{{ siteurl }}myphoto.jpg</code></pre>
{{ siteurl }}{% if not forcerandom %}myphoto.jpg{% else %}jm295snf.jpg{% endif %}</code></pre>
{% else %} {% else %}
<pre><code>$ curl -H &#34;Linx-Expiry: 1200&#34; -T myphoto.jpg {{ siteurl }}upload/ <pre><code>$ curl -H &#34;Linx-Expiry: 1200&#34; -T myphoto.jpg {{ siteurl }}upload/
{{ siteurl }}myphoto.jpg</code></pre>
{{ siteurl }}{% if not forcerandom %}myphoto.jpg{% else %}1doym9u2.jpg{% endif %}</code></pre>
{% endif %} {% endif %}
<p>Uploading myphoto.jpg with a random filename and getting a json response:</p> <p>Uploading myphoto.jpg with a random filename and getting a json response:</p>
{% if using_auth %} {% if using_auth %}
<pre><code>$ curl -H &#34;Linx-Api-Key: mysecretkey&#34; -H &#34;Accept: application/json&#34; -H &#34;Linx-Randomize: yes&#34; -T myphoto.jpg {{ siteurl }}upload/
<pre><code>$ curl -H &#34;Linx-Api-Key: mysecretkey&#34; -H &#34;Accept: application/json&#34;{% if not forcerandom %} -H &#34;Linx-Randomize: yes&#34;{% endif %} -T myphoto.jpg {{ siteurl }}upload/
{&#34;delete_key&#34;:&#34;...&#34;,&#34;expiry&#34;:&#34;0&#34;,&#34;filename&#34;:&#34;f34h4iu.jpg&#34;,&#34;mimetype&#34;:&#34;image/jpeg&#34;, {&#34;delete_key&#34;:&#34;...&#34;,&#34;expiry&#34;:&#34;0&#34;,&#34;filename&#34;:&#34;f34h4iu.jpg&#34;,&#34;mimetype&#34;:&#34;image/jpeg&#34;,
&#34;sha256sum&#34;:&#34;...&#34;,&#34;size&#34;:&#34;...&#34;,&#34;url&#34;:&#34;{{ siteurl }}f34h4iu.jpg&#34;}</code></pre> &#34;sha256sum&#34;:&#34;...&#34;,&#34;size&#34;:&#34;...&#34;,&#34;url&#34;:&#34;{{ siteurl }}f34h4iu.jpg&#34;}</code></pre>
{% else %} {% else %}
<pre><code>$ curl -H &#34;Accept: application/json&#34; -H &#34;Linx-Randomize: yes&#34; -T myphoto.jpg {{ siteurl }}upload/
<pre><code>$ curl -H &#34;Accept: application/json&#34;{% if not forcerandom %} -H &#34;Linx-Randomize: yes&#34;{% endif %} -T myphoto.jpg {{ siteurl }}upload/
{&#34;delete_key&#34;:&#34;...&#34;,&#34;expiry&#34;:&#34;0&#34;,&#34;filename&#34;:&#34;f34h4iu.jpg&#34;,&#34;mimetype&#34;:&#34;image/jpeg&#34;, {&#34;delete_key&#34;:&#34;...&#34;,&#34;expiry&#34;:&#34;0&#34;,&#34;filename&#34;:&#34;f34h4iu.jpg&#34;,&#34;mimetype&#34;:&#34;image/jpeg&#34;,
&#34;sha256sum&#34;:&#34;...&#34;,&#34;size&#34;:&#34;...&#34;,&#34;url&#34;:&#34;{{ siteurl }}f34h4iu.jpg&#34;}</code></pre> &#34;sha256sum&#34;:&#34;...&#34;,&#34;size&#34;:&#34;...&#34;,&#34;url&#34;:&#34;{{ siteurl }}f34h4iu.jpg&#34;}</code></pre>
{% endif %} {% endif %}
@ -121,6 +124,7 @@ DELETED</code></pre>
<blockquote> <blockquote>
<p>“url”: the publicly available upload url<br/> <p>“url”: the publicly available upload url<br/>
“direct_url”: the url to access the file directly<br/>
“filename”: the (optionally generated) filename<br/> “filename”: the (optionally generated) filename<br/>
“expiry”: the unix timestamp at which the file will expire (0 if never)<br/> “expiry”: the unix timestamp at which the file will expire (0 if never)<br/>
“size”: the size in bytes of the file<br/> “size”: the size in bytes of the file<br/>
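The hunks above also document the new direct_url field in the JSON responses. A minimal Go client sketch of the documented flow, uploading with a 20-minute expiry and decoding the response (the host and filename are placeholders, and only a subset of the documented fields is shown):

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"os"
)

// A subset of the JSON response fields documented above.
type uploadResp struct {
	URL       string `json:"url"`
	DirectURL string `json:"direct_url"`
	Filename  string `json:"filename"`
	DeleteKey string `json:"delete_key"`
	Expiry    string `json:"expiry"`
}

func main() {
	f, err := os.Open("myphoto.jpg")
	if err != nil {
		panic(err)
	}
	defer f.Close()

	// PUT upload, as in the curl examples, asking for JSON and a 20 minute expiry.
	req, err := http.NewRequest("PUT", "https://example.com/upload/myphoto.jpg", f)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Accept", "application/json")
	req.Header.Set("Linx-Expiry", "1200")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var out uploadResp
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Println(out.URL, out.DirectURL, out.Filename, out.DeleteKey, out.Expiry)
}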

3
templates/base.html

@ -3,7 +3,8 @@
<head> <head>
<title>{% block title %}{{ sitename }}{% endblock %}</title> <title>{% block title %}{{ sitename }}{% endblock %}</title>
<meta charset='utf-8' content='text/html' http-equiv='content-type'> <meta charset='utf-8' content='text/html' http-equiv='content-type'>
<link href='{{ sitepath }}static/css/linx.css' media='screen, projection' rel='stylesheet' type='text/css'>
<meta name='viewport' content='width=device-width, initial-scale=1.0'>
<link href='{{ sitepath }}static/css/linx.css?v=1' media='screen, projection' rel='stylesheet' type='text/css'>
<link href='{{ sitepath }}static/css/hint.css' rel='stylesheet' type='text/css'> <link href='{{ sitepath }}static/css/hint.css' rel='stylesheet' type='text/css'>
<link href='{{ sitepath }}static/images/favicon.gif' rel='icon' type='image/gif'> <link href='{{ sitepath }}static/images/favicon.gif' rel='icon' type='image/gif'>
{% block head %}{% endblock %} {% block head %}{% endblock %}

4
templates/display/audio.html

@ -2,8 +2,8 @@
{% block main %} {% block main %}
<audio class="display-audio" controls preload='auto'> <audio class="display-audio" controls preload='auto'>
<source src='{{ sitepath }}selif/{{ filename }}'>
<a href='{{ sitepath }}selif/{{ filename }}'>Download it instead</a>
<source src='{{ sitepath }}{{ selifpath }}{{ filename }}'>
<a href='{{ sitepath }}{{ selifpath }}{{ filename }}'>Download it instead</a>
</audio> </audio>
<div class="normal"> <div class="normal">
<a href="{{ sitepath }}selif/{{ filename }}" class="download-btn">Download</a> <a href="{{ sitepath }}selif/{{ filename }}" class="download-btn">Download</a>

22
templates/display/base.html

@ -6,32 +6,22 @@
{% block content %} {% block content %}
<div id="info" class="dinfo">
<div class="float-left" id="filename">
<div id="info" class="dinfo info-flex">
<div id="filename">
{{ filename }} {{ filename }}
</div> </div>
<div class="right">
<div class="info-actions">
{% if expiry %} {% if expiry %}
<span>file expires in {{ expiry }}</span> | <span>file expires in {{ expiry }}</span> |
{% endif %} {% endif %}
{% block infomore %}{% endblock %} {% block infomore %}{% endblock %}
<span>{{ size }}</span> | <span>{{ size }}</span> |
{% if shorturlEnabled %}
{% if shorturl %}
<a class="hint--top" aria-label="Click to copy into clipboard" id="shorturl"
style="cursor: pointer;" href="{{shorturl}}">{{shorturl}}</a> |
{% else %}
<a class="hint--top" aria-label="Click to retrieve shortened url" id="shorturl"
data-url="{{ sitepath }}{{filename}}/short" style="cursor: pointer;">short url</a> |
{% endif %}
{% endif %}
<a href="{{ filename }}/torrent" download>torrent</a> | <a href="{{ filename }}/torrent" download>torrent</a> |
<a href="{{ sitepath }}selif/{{ filename }}" download>get</a>
<a href="{{ sitepath }}{{ selifpath }}{{ filename }}" download>get</a>
</div> </div>
{% block infoleft %}{% endblock %} {% block infoleft %}{% endblock %}
<div class="clear"></div>
</div> </div>
<div id="main" {% block mainmore %}{% endblock %}> <div id="main" {% block mainmore %}{% endblock %}>
@ -43,8 +33,4 @@
</div> </div>
<script src="{{ sitepath }}static/js/clipboard.js"></script> <script src="{{ sitepath }}static/js/clipboard.js"></script>
{% if shorturlEnabled %}
<script src="{{ sitepath }}static/js/shorturl.js"></script>
{% endif %}
{% endblock %} {% endblock %}

37
templates/display/bin.html

@ -11,37 +11,34 @@
{% block infoleft %} {% block infoleft %}
<div id="editform"> <div id="editform">
<form id="reply" action='{{ sitepath }}upload' method='post' >
<div class="right">
<select id="expiry" name="expires">
<option disabled=disabled>Expires:</option>
<option value="0">never</option>
<option value="60">a minute</option>
<option value="300">5 minutes</option>
<option value="3600">an hour</option>
<option value="86400">a day</option>
<option value="604800">a week</option>
<option value="2419200">a month</option>
<option value="29030400">a year</option>
</select>
<button id="save">save</button>
<form id="reply" action='{{ sitepath }}upload' method='post'>
<div class="info-flex">
<div>
{% if not forcerandom %}<input class="codebox" name='filename' id="filename" type='text' value="" placeholder="filename">{% endif %}.<input id="extension" class="codebox" name='extension' type='text' value="{{ extra.extension }}" placeholder="txt">
</div>
<div class="info-actions">
<select id="expiry" name="expires">
<option disabled>Expires:</option>
{% for expiry in expirylist %}
<option value="{{ expiry.Seconds }}"{% if forloop.Last %} selected{% endif %}>{{ expiry.Human }}</option>
{% endfor %}
</select>
<button type="submit" id="save">Save</button>
</div>
</div> </div>
<input class="codebox" name='filename' id="filename" type='text' value="" placeholder="filename (empty for random filename)">.<input id="extension" class="codebox" name='extension' type='text' value="{{ extra.extension }}" placeholder="txt">
<textarea name='content' id="newcontent" class="editor"></textarea> <textarea name='content' id="newcontent" class="editor"></textarea>
</form> </form>
</div> </div>
{% endblock %} {% endblock %}
{%block infomore %}
{% block infomore %}
<label>wrap <input id="wordwrap" type="checkbox" checked></label> | <label>wrap <input id="wordwrap" type="checkbox" checked></label> |
{% endblock %} {% endblock %}
{% block main %} {% block main %}
<div id="normal-content" class="normal fixed"> <div id="normal-content" class="normal fixed">
<pre id="normal-code"><code id="codeb" class="{{ extra.lang_hl }}">{{ extra.contents }}</code></pre> <pre id="normal-code"><code id="codeb" class="{{ extra.lang_hl }}">{{ extra.contents }}</code></pre>
<textarea id="editor" class="editor">{{ extra.contents }}</textarea>
<textarea id="inplace-editor" class="editor">{{ extra.contents }}</textarea>
</div> </div>
@ -51,5 +48,5 @@
{% endif %} {% endif %}
<script src="{{ sitepath }}static/js/util.js"></script> <script src="{{ sitepath }}static/js/util.js"></script>
<script src="{{ sitepath }}static/js/bin.js"></script>
<script src="{{ sitepath }}static/js/bin.js?v=1"></script>
{% endblock %} {% endblock %}

4
templates/display/file.html

@ -2,8 +2,8 @@
{% block main %} {% block main %}
<div class="normal display-file"> <div class="normal display-file">
<p class="center">You are requesting <a href="{{ sitepath }}selif/{{ filename }}">{{ filename }}</a>, click below to download.</p>
<a href="{{ sitepath }}selif/{{ filename }}" class="download-btn">Download</a>
<p class="center">You are requesting <a href="{{ sitepath }}{{ selifpath }}{{ filename }}">{{ filename }}</a>, click below to download.</p>
<a href="{{ sitepath }}{{ selifpath }}{{ filename }}" class="download-btn">Download</a>
{% if files|length > 0 %} {% if files|length > 0 %}
<p>Contents of the archive:</p> <p>Contents of the archive:</p>

4
templates/display/image.html

@ -1,7 +1,7 @@
{% extends "base.html" %} {% extends "base.html" %}
{% block main %} {% block main %}
<a href="{{ sitepath }}selif/{{ filename }}">
<img class="display-image" src="{{ sitepath }}selif/{{ filename }}" />
<a href="{{ sitepath }}{{ selifpath }}{{ filename }}">
<img class="display-image" src="{{ sitepath }}{{ selifpath }}{{ filename }}" />
</a> </a>
{% endblock %} {% endblock %}

4
templates/display/pdf.html

@ -1,10 +1,10 @@
{% extends "base.html" %} {% extends "base.html" %}
{% block main %} {% block main %}
<object class="display-pdf" data="{{ sitepath }}selif/{{ filename }}" type="application/pdf">
<object class="display-pdf" data="{{ sitepath }}{{ selifpath }}{{ filename }}" type="application/pdf">
<p>It appears your Web browser is not configured to display PDF files. <p>It appears your Web browser is not configured to display PDF files.
No worries, just <a href="{{ sitepath }}selif/{{ filename }}">click here to download the PDF file.</a></p>
No worries, just <a href="{{ sitepath }}{{ selifpath }}{{ filename }}">click here to download the PDF file.</a></p>
</object> </object>
{% endblock %} {% endblock %}

34
templates/display/story.html

@ -9,24 +9,22 @@
{% block infoleft %} {% block infoleft %}
<div id="editform"> <div id="editform">
<form id="reply" action='{{ sitepath }}upload' method='post' >
<div class="right">
<select id="expiry" name="expires">
<option disabled=disabled>Expires:</option>
<option value="0">never</option>
<option value="60">a minute</option>
<option value="300">5 minutes</option>
<option value="3600">an hour</option>
<option value="86400">a day</option>
<option value="604800">a week</option>
<option value="2419200">a month</option>
<option value="29030400">a year</option>
</select>
<button id="save">save</button>
<form id="reply" action='{{ sitepath }}upload' method='post'>
<div class="info-flex">
<div>
{% if not forcerandom %}<input class="codebox" name='filename' id="filename" type='text' value="" placeholder="filename">{% endif %}.<input id="extension" class="codebox" name='extension' type='text' value="story" placeholder="txt">
</div>
<div class="info-actions">
<select id="expiry" name="expires">
<option disabled>Expires:</option>
{% for expiry in expirylist %}
<option value="{{ expiry.Seconds }}"{% if forloop.Last %} selected{% endif %}>{{ expiry.Human }}</option>
{% endfor %}
</select>
<button type="submit" id="save">Save</button>
</div>
</div> </div>
<input class="codebox" name='filename' id="filename" type='text' value="" placeholder="filename (empty for random filename)">.<input id="extension" class="codebox" name='extension' type='text' value="story" placeholder="txt">
<textarea name='content' id="newcontent" class="editor"></textarea> <textarea name='content' id="newcontent" class="editor"></textarea>
</form> </form>
</div> </div>
@ -39,10 +37,10 @@
{% block main %} {% block main %}
<div id="normal-content" class="normal"> <div id="normal-content" class="normal">
<pre id="normal-code"><code id="codeb" class="story">{% for line in lines %}{% if line|make_list|first == ">" %}<span class="storygreen">{{ line }}</span>{% else %}<span class="storyred">{{ line }}</span>{% endif %}{% endfor %}</code></pre> <pre id="normal-code"><code id="codeb" class="story">{% for line in lines %}{% if line|make_list|first == ">" %}<span class="storygreen">{{ line }}</span>{% else %}<span class="storyred">{{ line }}</span>{% endif %}{% endfor %}</code></pre>
<textarea id="editor" class="editor">{{ extra.contents }}</textarea>
<textarea id="inplace-editor" class="editor">{{ extra.contents }}</textarea>
</div> </div>
<script src="{{ sitepath }}static/js/util.js"></script> <script src="{{ sitepath }}static/js/util.js"></script>
<script src="{{ sitepath }}static/js/bin.js"></script>
<script src="{{ sitepath }}static/js/bin.js?v=1"></script>
{% endblock %} {% endblock %}

4
templates/display/video.html

@ -2,7 +2,7 @@
{% block main %} {% block main %}
<video class="display-video" controls autoplay> <video class="display-video" controls autoplay>
<source src="{{ sitepath }}selif/{{ filename }}"/>
<a href='{{ sitepath }}selif/{{ filename }}'>Download it instead</a>
<source src="{{ sitepath }}{{ selifpath }}{{ filename }}"/>
<a href='{{ sitepath }}{{ selifpath }}{{ filename }}'>Download it instead</a>
</video> </video>
{% endblock %} {% endblock %}

4
templates/index.html

@ -13,10 +13,11 @@
</div> </div>
<div id="dzone" class="dz-default dz-message"> <div id="dzone" class="dz-default dz-message">
<span>Click or Drop file(s)</span>
<span>Click or Drop file(s) or Paste image</span>
</div> </div>
<div id="choices"> <div id="choices">
<label>{% if not forcerandom %}<input name="randomize" id="randomize" type="checkbox" checked /> Randomize filename{% endif %}</label>
<div id="expiry"> <div id="expiry">
<label>File expiry: <label>File expiry:
<select name="expires" id="expires"> <select name="expires" id="expires">
@ -26,7 +27,6 @@
</select> </select>
</label> </label>
</div> </div>
<label><input name="randomize" id="randomize" type="checkbox" checked /> Randomize filename</label>
</div> </div>
<div class="clear"></div> <div class="clear"></div>
</form> </form>

20
templates/paste.html

@ -2,24 +2,24 @@
{% block content %} {% block content %}
<form id="reply" action='{{ sitepath }}upload' method='post'> <form id="reply" action='{{ sitepath }}upload' method='post'>
<div id="main">
<div id="info" class="ninfo">
<input class="codebox" name='filename' id="filename" type='text' value="" placeholder="filename (empty for random filename)" />.<span class="hint--top hint--bounce" data-hint="Enable syntax highlighting by adding the extension"><input id="extension" class="codebox" name='extension' type='text' value="" placeholder="txt" /></span>
<div class="right">
<div id="main" class="paste">
<div id="info" class="info-flex">
<div>
{% if not forcerandom %}<span class="hint--top hint--bounce" data-hint="Leave empty for random filename"><input class="codebox" name='filename' id="filename" type='text' value="" placeholder="filename" /></span>{% endif %}.<span class="hint--top hint--bounce" data-hint="Enable syntax highlighting by adding the extension"><input id="extension" class="codebox" name='extension' type='text' value="" placeholder="txt" /></span>
</div>
<div>
<select id="expiry" name="expires"> <select id="expiry" name="expires">
<option disabled="disabled">Expires:</option>
<option disabled>Expires:</option>
{% for expiry in expirylist %} {% for expiry in expirylist %}
<option value="{{ expiry.Seconds }}"{% if forloop.Last %} selected{% endif %}>{{ expiry.Human }}</option> <option value="{{ expiry.Seconds }}"{% if forloop.Last %} selected{% endif %}>{{ expiry.Human }}</option>
{% endfor %} {% endfor %}
</select> </select>
<input type="submit" value="Paste">
<button type="submit">Paste</button>
</div> </div>
</div> </div>
<div id="inner_content">
<textarea name='content' id="content" class="editor"></textarea>
<div id="inner_content" class="padme">
<textarea name='content' id="content" class="editor"></textarea>
</div> </div>
</div> </div>
</form> </form>

67
torrent.go

@ -2,64 +2,44 @@ package main
import ( import (
"bytes" "bytes"
"crypto/sha1"
"fmt" "fmt"
"io" "io"
"net/http" "net/http"
"time" "time"
"github.com/andreimarcu/linx-server/backends"
"github.com/andreimarcu/linx-server/expiry"
"github.com/andreimarcu/linx-server/torrent"
"github.com/zeebo/bencode" "github.com/zeebo/bencode"
"github.com/zenazn/goji/web" "github.com/zenazn/goji/web"
) )
const (
TORRENT_PIECE_LENGTH = 262144
)
type TorrentInfo struct {
PieceLength int `bencode:"piece length"`
Pieces string `bencode:"pieces"`
Name string `bencode:"name"`
Length int `bencode:"length"`
}
type Torrent struct {
Encoding string `bencode:"encoding"`
Info TorrentInfo `bencode:"info"`
UrlList []string `bencode:"url-list"`
}
func hashPiece(piece []byte) []byte {
h := sha1.New()
h.Write(piece)
return h.Sum(nil)
}
func createTorrent(fileName string, f io.Reader, r *http.Request) ([]byte, error) {
url := getSiteURL(r) + Config.selifPath + fileName
chunk := make([]byte, torrent.TORRENT_PIECE_LENGTH)
func createTorrent(fileName string, f io.ReadCloser, r *http.Request) ([]byte, error) {
chunk := make([]byte, TORRENT_PIECE_LENGTH)
torrent := Torrent{
t := torrent.Torrent{
Encoding: "UTF-8", Encoding: "UTF-8",
Info: TorrentInfo{
PieceLength: TORRENT_PIECE_LENGTH,
Info: torrent.TorrentInfo{
PieceLength: torrent.TORRENT_PIECE_LENGTH,
Name: fileName, Name: fileName,
}, },
UrlList: []string{fmt.Sprintf("%sselif/%s", getSiteURL(r), fileName)},
UrlList: []string{url},
} }
for { for {
n, err := f.Read(chunk)
n, err := io.ReadFull(f, chunk)
if err == io.EOF { if err == io.EOF {
break break
} else if err != nil {
} else if err != nil && err != io.ErrUnexpectedEOF {
return []byte{}, err return []byte{}, err
} }
torrent.Info.Length += n
torrent.Info.Pieces += string(hashPiece(chunk[:n]))
t.Info.Length += n
t.Info.Pieces += string(torrent.HashPiece(chunk[:n]))
} }
data, err := bencode.EncodeBytes(&torrent)
data, err := bencode.EncodeBytes(&t)
if err != nil { if err != nil {
return []byte{}, err return []byte{}, err
} }
@ -70,21 +50,24 @@ func createTorrent(fileName string, f io.ReadCloser, r *http.Request) ([]byte, e
func fileTorrentHandler(c web.C, w http.ResponseWriter, r *http.Request) { func fileTorrentHandler(c web.C, w http.ResponseWriter, r *http.Request) {
fileName := c.URLParams["name"] fileName := c.URLParams["name"]
err := checkFile(fileName)
if err == NotFoundErr {
metadata, f, err := storageBackend.Get(fileName)
if err == backends.NotFoundErr {
notFoundHandler(c, w, r) notFoundHandler(c, w, r)
return return
} else if err == BadMetadata {
} else if err == backends.BadMetadata {
oopsHandler(c, w, r, RespAUTO, "Corrupt metadata.") oopsHandler(c, w, r, RespAUTO, "Corrupt metadata.")
return return
} else if err != nil {
oopsHandler(c, w, r, RespAUTO, err.Error())
return
} }
defer f.Close()
f, err := fileBackend.Open(fileName)
if err != nil {
oopsHandler(c, w, r, RespHTML, "Could not create torrent.")
if expiry.IsTsExpired(metadata.Expiry) {
storageBackend.Delete(fileName)
notFoundHandler(c, w, r)
return return
} }
defer f.Close()
encoded, err := createTorrent(fileName, f, r) encoded, err := createTorrent(fileName, f, r)
if err != nil { if err != nil {

28
torrent/torrent.go

@ -0,0 +1,28 @@
package torrent
import (
"crypto/sha1"
)
const (
TORRENT_PIECE_LENGTH = 262144
)
type TorrentInfo struct {
PieceLength int `bencode:"piece length"`
Pieces string `bencode:"pieces"`
Name string `bencode:"name"`
Length int `bencode:"length"`
}
type Torrent struct {
Encoding string `bencode:"encoding"`
Info TorrentInfo `bencode:"info"`
UrlList []string `bencode:"url-list"`
}
func HashPiece(piece []byte) []byte {
h := sha1.New()
h.Write(piece)
return h.Sum(nil)
}
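Pulling these types and HashPiece into their own package lets other packages refer to them by import path (torrent_test.go below does exactly that), presumably the motivation for the move. Usage stays trivial, e.g.:

    sum := torrent.HashPiece([]byte("some piece data")) // 20-byte SHA-1 digest
    fmt.Printf("%x\n", sum)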

7
torrent_test.go

@ -5,12 +5,13 @@ import (
"os" "os"
"testing" "testing"
"github.com/andreimarcu/linx-server/torrent"
"github.com/zeebo/bencode" "github.com/zeebo/bencode"
) )
func TestCreateTorrent(t *testing.T) { func TestCreateTorrent(t *testing.T) {
fileName := "server.go" fileName := "server.go"
var decoded Torrent
var decoded torrent.Torrent
f, err := os.Open("server.go") f, err := os.Open("server.go")
if err != nil { if err != nil {
@ -45,14 +46,14 @@ func TestCreateTorrent(t *testing.T) {
t.Fatal("Length was less than or equal to 0, expected more") t.Fatal("Length was less than or equal to 0, expected more")
} }
tracker := fmt.Sprintf("%sselif/%s", Config.siteURL, fileName)
tracker := fmt.Sprintf("%s%s%s", Config.siteURL, Config.selifPath, fileName)
if decoded.UrlList[0] != tracker { if decoded.UrlList[0] != tracker {
t.Fatalf("First entry in URL list was %s, expected %s", decoded.UrlList[0], tracker) t.Fatalf("First entry in URL list was %s, expected %s", decoded.UrlList[0], tracker)
} }
} }
func TestCreateTorrentWithImage(t *testing.T) { func TestCreateTorrentWithImage(t *testing.T) {
var decoded Torrent
var decoded torrent.Torrent
f, err := os.Open("static/images/404.jpg") f, err := os.Open("static/images/404.jpg")
if err != nil { if err != nil {

143
upload.go

@ -15,11 +15,14 @@ import (
"strings" "strings"
"time" "time"
"github.com/andreimarcu/linx-server/backends"
"github.com/andreimarcu/linx-server/expiry"
"github.com/dchest/uniuri" "github.com/dchest/uniuri"
"github.com/zenazn/goji/web" "github.com/zenazn/goji/web"
"gopkg.in/h2non/filetype.v1" "gopkg.in/h2non/filetype.v1"
) )
var FileTooLargeError = errors.New("File too large.")
var fileBlacklist = map[string]bool{ var fileBlacklist = map[string]bool{
"favicon.ico": true, "favicon.ico": true,
"index.htm": true, "index.htm": true,
@ -32,21 +35,22 @@ var fileBlacklist = map[string]bool{
// Describes metadata directly from the user request // Describes metadata directly from the user request
type UploadRequest struct { type UploadRequest struct {
src io.Reader src io.Reader
size int64
filename string filename string
expiry time.Duration // Seconds until expiry, 0 = never expiry time.Duration // Seconds until expiry, 0 = never
deleteKey string // Empty string if not defined
randomBarename bool randomBarename bool
deletionKey string // Empty string if not defined
} }
// Metadata associated with a file as it would actually be stored // Metadata associated with a file as it would actually be stored
type Upload struct { type Upload struct {
Filename string // Final filename on disk Filename string // Final filename on disk
Metadata Metadata
Metadata backends.Metadata
} }
func uploadPostHandler(c web.C, w http.ResponseWriter, r *http.Request) { func uploadPostHandler(c web.C, w http.ResponseWriter, r *http.Request) {
if !strictReferrerCheck(r, getSiteURL(r), []string{"Linx-Delete-Key", "Linx-Expiry", "Linx-Randomize", "X-Requested-With"}) { if !strictReferrerCheck(r, getSiteURL(r), []string{"Linx-Delete-Key", "Linx-Expiry", "Linx-Randomize", "X-Requested-With"}) {
badRequestHandler(c, w, r)
badRequestHandler(c, w, r, RespAUTO, "")
return return
} }
@ -63,32 +67,39 @@ func uploadPostHandler(c web.C, w http.ResponseWriter, r *http.Request) {
} }
defer file.Close() defer file.Close()
r.ParseForm()
if r.Form.Get("randomize") == "true" {
upReq.randomBarename = true
}
upReq.expiry = parseExpiry(r.Form.Get("expires"))
upReq.src = file upReq.src = file
upReq.size = headers.Size
upReq.filename = headers.Filename upReq.filename = headers.Filename
} else { } else {
if r.FormValue("content") == "" {
oopsHandler(c, w, r, RespHTML, "Empty file")
if r.PostFormValue("content") == "" {
badRequestHandler(c, w, r, RespAUTO, "Empty file")
return return
} }
extension := r.FormValue("extension")
extension := r.PostFormValue("extension")
if extension == "" { if extension == "" {
extension = "txt" extension = "txt"
} }
upReq.src = strings.NewReader(r.FormValue("content"))
upReq.expiry = parseExpiry(r.FormValue("expires"))
upReq.filename = r.FormValue("filename") + "." + extension
content := r.PostFormValue("content")
upReq.src = strings.NewReader(content)
upReq.size = int64(len(content))
upReq.filename = r.PostFormValue("filename") + "." + extension
}
upReq.expiry = parseExpiry(r.PostFormValue("expires"))
if r.PostFormValue("randomize") == "true" {
upReq.randomBarename = true
} }
upload, err := processUpload(upReq) upload, err := processUpload(upReq)
if strings.EqualFold("application/json", r.Header.Get("Accept")) { if strings.EqualFold("application/json", r.Header.Get("Accept")) {
if err != nil {
if err == FileTooLargeError || err == backends.FileEmptyError {
badRequestHandler(c, w, r, RespJSON, err.Error())
return
} else if err != nil {
oopsHandler(c, w, r, RespJSON, "Could not upload file: "+err.Error()) oopsHandler(c, w, r, RespJSON, "Could not upload file: "+err.Error())
return return
} }
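Two behaviour notes on this hunk. First, the move from FormValue to PostFormValue is a real change: FormValue merges the URL query string with the request body (body values win on conflict), while PostFormValue only consults the body, so query-string parameters can no longer stand in for pasted fields; it also moves the randomize/expires parsing out of the multipart branch so it applies to text pastes as well. Standard net/http behaviour, illustrated:

    // Given: POST /upload?extension=html  with a body that omits "extension"
    r.FormValue("extension")     // "html" – falls back to the query string
    r.PostFormValue("extension") // ""     – the query string is ignored

Second, FileTooLargeError and backends.FileEmptyError are now routed to badRequestHandler (a 400-style client error) instead of the generic oops response.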
@ -97,14 +108,16 @@ func uploadPostHandler(c web.C, w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json; charset=UTF-8") w.Header().Set("Content-Type", "application/json; charset=UTF-8")
w.Write(js) w.Write(js)
} else { } else {
if err != nil {
if err == FileTooLargeError || err == backends.FileEmptyError {
badRequestHandler(c, w, r, RespHTML, err.Error())
return
} else if err != nil {
oopsHandler(c, w, r, RespHTML, "Could not upload file: "+err.Error()) oopsHandler(c, w, r, RespHTML, "Could not upload file: "+err.Error())
return return
} }
http.Redirect(w, r, Config.sitePath+upload.Filename, 303) http.Redirect(w, r, Config.sitePath+upload.Filename, 303)
} }
} }
func uploadPutHandler(c web.C, w http.ResponseWriter, r *http.Request) { func uploadPutHandler(c web.C, w http.ResponseWriter, r *http.Request) {
@ -113,12 +126,15 @@ func uploadPutHandler(c web.C, w http.ResponseWriter, r *http.Request) {
defer r.Body.Close() defer r.Body.Close()
upReq.filename = c.URLParams["name"] upReq.filename = c.URLParams["name"]
upReq.src = r.Body
upReq.src = http.MaxBytesReader(w, r.Body, Config.maxSize)
upload, err := processUpload(upReq) upload, err := processUpload(upReq)
if strings.EqualFold("application/json", r.Header.Get("Accept")) { if strings.EqualFold("application/json", r.Header.Get("Accept")) {
if err != nil {
if err == FileTooLargeError || err == backends.FileEmptyError {
badRequestHandler(c, w, r, RespJSON, err.Error())
return
} else if err != nil {
oopsHandler(c, w, r, RespJSON, "Could not upload file: "+err.Error()) oopsHandler(c, w, r, RespJSON, "Could not upload file: "+err.Error())
return return
} }
@ -127,7 +143,10 @@ func uploadPutHandler(c web.C, w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json; charset=UTF-8") w.Header().Set("Content-Type", "application/json; charset=UTF-8")
w.Write(js) w.Write(js)
} else { } else {
if err != nil {
if err == FileTooLargeError || err == backends.FileEmptyError {
badRequestHandler(c, w, r, RespPLAIN, err.Error())
return
} else if err != nil {
oopsHandler(c, w, r, RespPLAIN, "Could not upload file: "+err.Error()) oopsHandler(c, w, r, RespPLAIN, "Could not upload file: "+err.Error())
return return
} }
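Wrapping the body in http.MaxBytesReader enforces Config.maxSize while streaming instead of after the fact: once more than maxSize bytes have been read, further reads fail and the server marks the connection to be closed. A rough sketch of the behaviour (the error text is the standard library's, not linx-specific):

    limited := http.MaxBytesReader(w, r.Body, Config.maxSize)
    if _, err := ioutil.ReadAll(limited); err != nil {
        // err becomes non-nil as soon as the body exceeds Config.maxSize
        // ("http: request body too large"), which the handlers above map
        // to a 400-style response rather than a 500.
    }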
@ -160,8 +179,8 @@ func uploadRemote(c web.C, w http.ResponseWriter, r *http.Request) {
} }
upReq.filename = filepath.Base(grabUrl.Path) upReq.filename = filepath.Base(grabUrl.Path)
upReq.src = resp.Body
upReq.deletionKey = r.FormValue("deletekey")
upReq.src = http.MaxBytesReader(w, resp.Body, Config.maxSize)
upReq.deleteKey = r.FormValue("deletekey")
upReq.randomBarename = r.FormValue("randomize") == "yes" upReq.randomBarename = r.FormValue("randomize") == "yes"
upReq.expiry = parseExpiry(r.FormValue("expiry")) upReq.expiry = parseExpiry(r.FormValue("expiry"))
@ -191,20 +210,26 @@ func uploadHeaderProcess(r *http.Request, upReq *UploadRequest) {
upReq.randomBarename = true upReq.randomBarename = true
} }
upReq.deletionKey = r.Header.Get("Linx-Delete-Key")
upReq.deleteKey = r.Header.Get("Linx-Delete-Key")
// Get seconds until expiry. Non-integer responses never expire. // Get seconds until expiry. Non-integer responses never expire.
expStr := r.Header.Get("Linx-Expiry") expStr := r.Header.Get("Linx-Expiry")
upReq.expiry = parseExpiry(expStr) upReq.expiry = parseExpiry(expStr)
} }
func processUpload(upReq UploadRequest) (upload Upload, err error) { func processUpload(upReq UploadRequest) (upload Upload, err error) {
// Determine the appropriate filename, then write to disk
if upReq.size > Config.maxSize {
return upload, FileTooLargeError
}
// Determine the appropriate filename
barename, extension := barePlusExt(upReq.filename) barename, extension := barePlusExt(upReq.filename)
randomize := false
// Randomize the "barename" (filename without extension) if needed
if upReq.randomBarename || len(barename) == 0 { if upReq.randomBarename || len(barename) == 0 {
barename = generateBarename() barename = generateBarename()
randomize = true
} }
var header []byte var header []byte
@ -213,7 +238,7 @@ func processUpload(upReq UploadRequest) (upload Upload, err error) {
header = make([]byte, 512) header = make([]byte, 512)
n, _ := upReq.src.Read(header) n, _ := upReq.src.Read(header)
if n == 0 { if n == 0 {
return upload, errors.New("Empty file")
return upload, backends.FileEmptyError
} }
header = header[:n] header = header[:n]
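Context for this hunk: processUpload sniffs up to 512 bytes from the stream to detect the content type before anything is written (512 is also what http.DetectContentType inspects); since those bytes have been consumed from upReq.src, they are re-attached with io.MultiReader when the file is handed to the backend further down. A minimal sketch of the pattern, using the stdlib sniffer as a stand-in for the filetype package this project actually uses:

    var src io.Reader = strings.NewReader("%PDF-1.4 example") // any upload stream
    header := make([]byte, 512)
    n, _ := src.Read(header)
    header = header[:n]
    kind := http.DetectContentType(header)                    // "application/pdf" for this input
    full := io.MultiReader(bytes.NewReader(header), src)      // restore the sniffed bytes before storage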
@ -229,28 +254,44 @@ func processUpload(upReq UploadRequest) (upload Upload, err error) {
upload.Filename = strings.Join([]string{barename, extension}, ".") upload.Filename = strings.Join([]string{barename, extension}, ".")
upload.Filename = strings.Replace(upload.Filename, " ", "", -1) upload.Filename = strings.Replace(upload.Filename, " ", "", -1)
fileexists, _ := fileBackend.Exists(upload.Filename)
fileexists, _ := storageBackend.Exists(upload.Filename)
// Check if the delete key matches, in which case overwrite // Check if the delete key matches, in which case overwrite
if fileexists { if fileexists {
metad, merr := metadataRead(upload.Filename)
metad, merr := storageBackend.Head(upload.Filename)
if merr == nil { if merr == nil {
if upReq.deletionKey == metad.DeleteKey {
if upReq.deleteKey == metad.DeleteKey {
fileexists = false fileexists = false
} else if Config.forceRandomFilename == true {
// the file exists
// the delete key doesn't match
// force random filenames is enabled
randomize = true
} }
} }
} else if Config.forceRandomFilename == true {
// the file doesn't exist
// force random filenames is enabled
randomize = true
// set fileexists to true to generate a new barename
fileexists = true
} }
for fileexists { for fileexists {
counter, err := strconv.Atoi(string(barename[len(barename)-1]))
if err != nil {
barename = barename + "1"
if randomize {
barename = generateBarename()
} else { } else {
barename = barename[:len(barename)-1] + strconv.Itoa(counter+1)
counter, err := strconv.Atoi(string(barename[len(barename)-1]))
if err != nil {
barename = barename + "1"
} else {
barename = barename[:len(barename)-1] + strconv.Itoa(counter+1)
}
} }
upload.Filename = strings.Join([]string{barename, extension}, ".") upload.Filename = strings.Join([]string{barename, extension}, ".")
fileexists, err = fileBackend.Exists(upload.Filename)
fileexists, err = storageBackend.Exists(upload.Filename)
} }
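Behaviour note on the collision loop above: when the requested name is taken and the delete key does not match, a numeric suffix is appended or incremented ("notes.txt" becomes "notes1.txt", then "notes2.txt", and so on); with forceRandomFilename enabled, or when the paste had no barename to begin with, a fresh random barename is generated instead until a free name is found.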
if fileBlacklist[strings.ToLower(upload.Filename)] { if fileBlacklist[strings.ToLower(upload.Filename)] {
@ -258,31 +299,22 @@ func processUpload(upReq UploadRequest) (upload Upload, err error) {
} }
// Get the rest of the metadata needed for storage // Get the rest of the metadata needed for storage
var expiry time.Time
var fileExpiry time.Time
if upReq.expiry == 0 { if upReq.expiry == 0 {
expiry = neverExpire
fileExpiry = expiry.NeverExpire
} else { } else {
expiry = time.Now().Add(upReq.expiry)
fileExpiry = time.Now().Add(upReq.expiry)
} }
bytes, err := fileBackend.Put(upload.Filename, io.MultiReader(bytes.NewReader(header), upReq.src))
if err != nil {
return upload, err
} else if bytes > Config.maxSize {
fileBackend.Delete(upload.Filename)
return upload, errors.New("File too large")
if upReq.deleteKey == "" {
upReq.deleteKey = uniuri.NewLen(30)
} }
upload.Metadata, err = generateMetadata(upload.Filename, expiry, upReq.deletionKey)
if err != nil {
fileBackend.Delete(upload.Filename)
return
}
err = metadataWrite(upload.Filename, &upload.Metadata)
upload.Metadata, err = storageBackend.Put(upload.Filename, io.MultiReader(bytes.NewReader(header), upReq.src), fileExpiry, upReq.deleteKey)
if err != nil { if err != nil {
fileBackend.Delete(upload.Filename)
return
return upload, err
} }
return return
} }
@ -293,6 +325,7 @@ func generateBarename() string {
func generateJSONresponse(upload Upload, r *http.Request) []byte { func generateJSONresponse(upload Upload, r *http.Request) []byte {
js, _ := json.Marshal(map[string]string{ js, _ := json.Marshal(map[string]string{
"url": getSiteURL(r) + upload.Filename, "url": getSiteURL(r) + upload.Filename,
"direct_url": getSiteURL(r) + Config.selifPath + upload.Filename,
"filename": upload.Filename, "filename": upload.Filename,
"delete_key": upload.Metadata.DeleteKey, "delete_key": upload.Metadata.DeleteKey,
"expiry": strconv.FormatInt(upload.Metadata.Expiry.Unix(), 10), "expiry": strconv.FormatInt(upload.Metadata.Expiry.Unix(), 10),
@ -342,14 +375,14 @@ func parseExpiry(expStr string) time.Duration {
if expStr == "" { if expStr == "" {
return time.Duration(Config.maxExpiry) * time.Second return time.Duration(Config.maxExpiry) * time.Second
} else { } else {
expiry, err := strconv.ParseUint(expStr, 10, 64)
fileExpiry, err := strconv.ParseUint(expStr, 10, 64)
if err != nil { if err != nil {
return time.Duration(Config.maxExpiry) * time.Second return time.Duration(Config.maxExpiry) * time.Second
} else { } else {
if Config.maxExpiry > 0 && (expiry > Config.maxExpiry || expiry == 0) {
expiry = Config.maxExpiry
if Config.maxExpiry > 0 && (fileExpiry > Config.maxExpiry || fileExpiry == 0) {
fileExpiry = Config.maxExpiry
} }
return time.Duration(expiry) * time.Second
return time.Duration(fileExpiry) * time.Second
} }
} }
} }
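Worked example of the renamed parseExpiry path, assuming Config.maxExpiry = 86400 (one day; an illustrative value):

    parseExpiry("3600")   // 1h0m0s
    parseExpiry("")       // 24h0m0s – empty defaults to the cap
    parseExpiry("0")      // 24h0m0s – "never" is clamped whenever a cap is configured
    parseExpiry("junk")   // 24h0m0s – unparsable input falls back to the cap
    parseExpiry("604800") // 24h0m0s – above the cap, clamped

Only when maxExpiry is 0 does "0" (or an empty value) yield a zero duration, i.e. never expire.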