
add a file blacklist and add robots.txt

Fixes #26
mutantmonkey committed 9 years ago (pull/39/head)
commit ad9d712a3a
 server.go         |  1 +
 static/robots.txt |  3 +++
 upload.go         | 12 ++++++++++++
 3 files changed, 16 insertions(+)

server.go (+1)

@@ -108,6 +108,7 @@ func setup() {
 	goji.Get("/static/*", staticHandler)
 	goji.Get("/favicon.ico", staticHandler)
+	goji.Get("/robots.txt", staticHandler)
 	goji.Get(nameRe, fileDisplayHandler)
 	goji.Get(selifRe, fileServeHandler)
 	goji.Get(selifIndexRe, unauthorizedHandler)

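The new route simply points /robots.txt at the same handler that already serves /static/* and /favicon.ico. Below is a minimal, self-contained sketch of how such a shared handler could look with zenazn/goji; the real staticHandler in this repository may differ, and the "static" directory name and the lack of path sanitization are assumptions for illustration only.

	package main

	import (
		"net/http"
		"path"
		"strings"

		"github.com/zenazn/goji"
		"github.com/zenazn/goji/web"
	)

	// staticHandler serves files from a local static/ directory. For /static/*
	// routes goji stores the wildcard match in c.URLParams["*"]; fixed routes
	// such as /favicon.ico and /robots.txt fall back to the request path.
	func staticHandler(c web.C, w http.ResponseWriter, r *http.Request) {
		name := c.URLParams["*"]
		if name == "" {
			name = strings.TrimPrefix(r.URL.Path, "/")
		}
		// NOTE: a production handler must also reject path traversal; omitted here.
		http.ServeFile(w, r, path.Join("static", name))
	}

	func main() {
		goji.Get("/static/*", staticHandler)
		goji.Get("/favicon.ico", staticHandler)
		goji.Get("/robots.txt", staticHandler)
		goji.Serve()
	}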
static/robots.txt (+3)

@@ -0,0 +1,3 @@
+User-agent: *
+Allow: /$
+Disallow: *

upload.go (+12)

@@ -20,6 +20,14 @@ import (
 	"github.com/zenazn/goji/web"
 )
 
+var fileBlacklist = map[string]bool{
+	"favicon.ico": true,
+	"index.htm":   true,
+	"index.html":  true,
+	"index.php":   true,
+	"robots.txt":  true,
+}
+
 // Describes metadata directly from the user request
 type UploadRequest struct {
 	src io.Reader
@@ -227,6 +235,10 @@ func processUpload(upReq UploadRequest) (upload Upload, err error) {
 		fileexists = err == nil
 	}
 
+	if fileBlacklist[strings.ToLower(upload.Filename)] {
+		return upload, errors.New("Prohibited filename")
+	}
+
 	dst, err := os.Create(path.Join(Config.filesDir, upload.Filename))
 	if err != nil {
 		return

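The rejection logic in processUpload leans on two details: strings.ToLower makes the comparison case-insensitive, and a Go map lookup for a missing key returns the zero value (false), so any name not listed passes through. A small standalone sketch of that behavior follows; the isBlacklisted helper and the example filenames are illustrative only and not part of this commit.

	package main

	import (
		"fmt"
		"strings"
	)

	// Same reserved-name set that the commit adds to upload.go.
	var fileBlacklist = map[string]bool{
		"favicon.ico": true,
		"index.htm":   true,
		"index.html":  true,
		"index.php":   true,
		"robots.txt":  true,
	}

	// isBlacklisted reports whether an uploaded filename collides with a
	// reserved name, ignoring case. (Illustrative helper, not in the commit.)
	func isBlacklisted(name string) bool {
		return fileBlacklist[strings.ToLower(name)]
	}

	func main() {
		fmt.Println(isBlacklisted("Robots.TXT")) // true: matched case-insensitively
		fmt.Println(isBlacklisted("photo.png"))  // false: missing keys yield the zero value
	}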