Chore #223 #333 #450 #692

Closed. Wants to merge 7 commits.
1 change: 1 addition & 0 deletions .gitignore
@@ -5,3 +5,4 @@ node_modules/
.DS_Store
./tusd
tusd_*_*
.idea
2 changes: 1 addition & 1 deletion Dockerfile
@@ -25,7 +25,7 @@ RUN set -xe \
-o /go/bin/tusd ./cmd/tusd/main.go

# start a new stage that copies in the binary built in the previous stage
FROM alpine:3.15.0
FROM alpine:3.15.1
WORKDIR /srv/tusd-data

RUN apk add --no-cache ca-certificates jq \
9 changes: 9 additions & 0 deletions cmd/tusd/cli/composer.go
@@ -57,10 +57,19 @@ func CreateComposer() {
s3Config = s3Config.WithEndpoint(Flags.S3Endpoint).WithS3ForcePathStyle(true)
}

if Flags.AllowCustomFilepath {
stdout.Printf("Saving objects with custom path and filename enabled. Use CustomFilepath metadata.\n")
}

if Flags.DisableDownload {
stdout.Printf("Downloads disabled.\n")
}

// Derive credentials from default credential chain (env, shared, ec2 instance role)
// as per https://github.com/aws/aws-sdk-go#configuring-credentials
store := s3store.New(Flags.S3Bucket, s3.New(session.Must(session.NewSession()), s3Config))
store.ObjectPrefix = Flags.S3ObjectPrefix
store.AllowCustomObjectPath = Flags.AllowCustomFilepath
store.PreferredPartSize = Flags.S3PartSize
store.DisableContentHashes = Flags.S3DisableContentHashes
store.UseIn(Composer)
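For readers wiring this up outside the CLI, here is a minimal sketch of the same S3 composer setup as a standalone program. The bucket name and object prefix are placeholders, credentials and region are assumed to come from the usual AWS environment, and the `AllowCustomObjectPath` field only exists with this PR applied.

```go
package main

import (
	"log"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/s3"

	"github.com/tus/tusd/pkg/handler"
	"github.com/tus/tusd/pkg/s3store"
)

func main() {
	// Credentials and region come from the default AWS credential chain
	// (environment variables, shared config, or an instance role).
	s3Config := aws.NewConfig()

	// "my-upload-bucket" and the object prefix are placeholders.
	store := s3store.New("my-upload-bucket", s3.New(session.Must(session.NewSession()), s3Config))
	store.ObjectPrefix = "uploads/"
	// Store objects under the client-supplied CustomFilepath metadata value
	// instead of the generated ID (field introduced by this PR).
	store.AllowCustomObjectPath = true

	composer := handler.NewStoreComposer()
	store.UseIn(composer)

	log.Println("S3 store composer configured")
	_ = composer // hand this to handler.NewHandler in a real program
}
```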
6 changes: 6 additions & 0 deletions cmd/tusd/cli/flags.go
@@ -20,7 +20,10 @@ var Flags struct {
MaxSize int64
UploadDir string
Basepath string
AllowCustomFilepath bool
ShowGreeting bool
DisableDownload bool
DisableDelete bool
Timeout int64
S3Bucket string
S3ObjectPrefix string
@@ -67,7 +70,10 @@ func ParseFlags() {
flag.Int64Var(&Flags.MaxSize, "max-size", 0, "Maximum size of a single upload in bytes")
flag.StringVar(&Flags.UploadDir, "upload-dir", "./data", "Directory to store uploads in")
flag.StringVar(&Flags.Basepath, "base-path", "/files/", "Basepath of the HTTP server")
flag.BoolVar(&Flags.AllowCustomFilepath, "allow-custom-filepath", false, "Allows to customize path and filename (instead of generated ID, basepath respected). Send it with metadata CustomFilepath value. Currently implemented only for s3store (default false)")
flag.BoolVar(&Flags.ShowGreeting, "show-greeting", true, "Show the greeting message")
flag.BoolVar(&Flags.DisableDownload, "disable-download", false, "Disable the download endpoint")
flag.BoolVar(&Flags.DisableDelete, "disable-delete", false, "Disable the delete endpoint")
flag.Int64Var(&Flags.Timeout, "timeout", 6*1000, "Read timeout for connections in milliseconds. A zero value means that reads will not timeout")
flag.StringVar(&Flags.S3Bucket, "s3-bucket", "", "Use AWS S3 with this bucket as storage backend (requires the AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY and AWS_REGION environment variables to be set)")
flag.StringVar(&Flags.S3ObjectPrefix, "s3-object-prefix", "", "Prefix for S3 object names")
7 changes: 7 additions & 0 deletions cmd/tusd/cli/serve.go
@@ -27,11 +27,14 @@ func Serve() {
MaxSize: Flags.MaxSize,
BasePath: Flags.Basepath,
RespectForwardedHeaders: Flags.BehindProxy,
DisableDownload: Flags.DisableDownload,
DisableDelete: Flags.DisableDelete,
StoreComposer: Composer,
NotifyCompleteUploads: true,
NotifyTerminatedUploads: true,
NotifyUploadProgress: true,
NotifyCreatedUploads: true,
AllowCustomFilepath: Flags.AllowCustomFilepath,
}

if err := SetupPreHooks(&config); err != nil {
@@ -56,6 +59,10 @@

stdout.Printf("Using %s as the base path.\n", basepath)

if Flags.AllowCustomFilepath && Flags.S3Bucket == "" {
stderr.Fatalf("The -allow-custom-filepath flag is currently implemented only for S3, but -s3-bucket was not provided.\n")
}

SetupPostHooks(handler)

if Flags.ExposeMetrics {
4 changes: 2 additions & 2 deletions docs/faq.md
@@ -32,11 +32,11 @@ This error can occur when you are running tusd's disk storage on a file system w

### How can I prevent users from downloading the uploaded files?

tusd allows any user to retrieve a previously uploaded file by issuing a HTTP GET request to the corresponding upload URL. This is possible as long as the uploaded files on the datastore have not been deleted or moved to another location. While it is a handy feature for debugging and testing your setup, we know that there are situations where you don't want to allow downloads or where you want more control about who downloads what. In these scenarios we recommend to place a proxy in front of tusd which takes on the task of access control or even preventing HTTP GET requests entirely. tusd has no feature built in for controling or disabling downloads on its own because the main focus is on accepting uploads, not serving files.
tusd allows any user to retrieve a previously uploaded file by issuing an HTTP GET request to the corresponding upload URL. This is possible as long as the uploaded files on the datastore have not been deleted or moved to another location. While this is a handy feature for debugging and testing your setup, we know that there are situations where you don't want to allow downloads or where you want more control over who downloads what. In these scenarios we recommend placing a proxy in front of tusd which takes on the task of access control. tusd's main focus is on accepting uploads, not serving files, but if you want to disable downloads entirely, you can use the `-disable-download` flag.
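As a concrete illustration of the proxy approach, here is a minimal sketch of a Go reverse proxy that rejects GET requests before they reach tusd. It assumes tusd is listening on localhost:1080 and the proxy on port 8080; both addresses are placeholders.

```go
package main

import (
	"log"
	"net/http"
	"net/http/httputil"
	"net/url"
)

func main() {
	// Assumes tusd is listening locally; adjust the target to your deployment.
	target, err := url.Parse("http://localhost:1080")
	if err != nil {
		log.Fatal(err)
	}
	proxy := httputil.NewSingleHostReverseProxy(target)

	http.HandleFunc("/files/", func(w http.ResponseWriter, r *http.Request) {
		// Block downloads at the proxy; POST/PATCH/HEAD/OPTIONS pass through to tusd.
		if r.Method == http.MethodGet {
			http.Error(w, "Downloads are disabled", http.StatusForbidden)
			return
		}
		proxy.ServeHTTP(w, r)
	})

	log.Fatal(http.ListenAndServe(":8080", nil))
}
```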

### How can I keep the original filename for the uploads?

tusd will generate a unique ID for every upload, e.g. `1881febb4343e9b806cad2e676989c0d`, which is also used as the filename for storing the upload. If you want to keep the original filename, e.g. `my_image.png`, you will have to rename the uploaded file manually after the upload is completed. One can use the [`post-finish` hook](https://github.com/tus/tusd/blob/master/docs/hooks.md#post-finish) to be notified once the upload is completed. The client must also be configured to add the filename to the upload's metadata, which can be [accessed inside the hooks](https://github.com/tus/tusd/blob/master/docs/hooks.md#the-hooks-environment) and used for the renaming operation.
tusd will generate a unique ID for every upload, e.g. `1881febb4343e9b806cad2e676989c0d`, which is also used as the filename for storing the upload. If you want to keep the original filename, e.g. `my_image.png`, you will have to rename the uploaded file manually after the upload is completed. One can use the [`post-finish` hook](https://github.com/tus/tusd/blob/master/docs/hooks.md#post-finish) to be notified once the upload is completed. The client must also be configured to add the filename to the upload's metadata, which can be [accessed inside the hooks](https://github.com/tus/tusd/blob/master/docs/hooks.md#the-hooks-environment) and used for the renaming operation. Alternatively, for S3 storage you can use the `-allow-custom-filepath` flag and set the full object path, including its name and extension, via the `CustomFilepath` metadata value.
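To illustrate the metadata side, here is a hedged sketch of a creation request from a Go client that sends both `filename` and `CustomFilepath` values. The server URL, upload length, and object path are placeholders; the header encoding simply follows the tus Upload-Metadata format of comma-separated `key base64(value)` pairs.

```go
package main

import (
	"encoding/base64"
	"fmt"
	"net/http"
)

func main() {
	// The metadata values below are placeholders for illustration.
	filename := base64.StdEncoding.EncodeToString([]byte("my_image.png"))
	customPath := base64.StdEncoding.EncodeToString([]byte("avatars/2022/my_image.png"))

	req, err := http.NewRequest(http.MethodPost, "http://localhost:1080/files/", nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Tus-Resumable", "1.0.0")
	req.Header.Set("Upload-Length", "1048576") // placeholder size in bytes
	// Upload-Metadata is a comma-separated list of "key base64(value)" pairs.
	req.Header.Set("Upload-Metadata", fmt.Sprintf("filename %s,CustomFilepath %s", filename, customPath))

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("Location:", resp.Header.Get("Location"))
}
```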

### Does tusd support Cross-Origin Resource Sharing (CORS)?

2 changes: 2 additions & 0 deletions docs/usage-binary.md
@@ -92,6 +92,8 @@ options:

```
$ tusd -help
-allow-custom-filepath bool
Allow the object path and filename to be customized (instead of the generated ID; the base path is still respected) via the CustomFilepath metadata value. Currently implemented only for s3store
-azure-blob-access-tier string
Blob access tier when uploading new files (possible values: archive, cool, hot, '')
-azure-container-access-type string
10 changes: 10 additions & 0 deletions pkg/handler/config.go
@@ -22,6 +22,16 @@ type Config struct {
// absolute URL containing a scheme, e.g. "http://tus.io"
BasePath string
isAbs bool
// AllowCustomFilepath allows files to be stored under the path supplied in the
// CustomFilepath metadata value instead of the generated ID, if the store
// implementation supports it. Requires the metadata extension. BasePath is respected.
AllowCustomFilepath bool
// DisableDownload indicates whether the server will refuse downloads of the
// uploaded file, by not mounting the GET handler.
DisableDownload bool
// DisableDelete indicates whether the server will refuse delete requests of
// the uploaded file, by not mounting the DELETE handler.
DisableDelete bool
// NotifyCompleteUploads indicates whether sending notifications about
// completed uploads using the CompleteUploads channel should be enabled.
NotifyCompleteUploads bool
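For embedders, a minimal sketch of how the new Config fields might be used together with the existing filestore. The data directory and listen address are placeholders, and DisableDownload and DisableDelete only exist with this PR applied.

```go
package main

import (
	"log"
	"net/http"

	"github.com/tus/tusd/pkg/filestore"
	"github.com/tus/tusd/pkg/handler"
)

func main() {
	composer := handler.NewStoreComposer()
	// Placeholder directory; any datastore wired into the composer works here.
	filestore.New("./data").UseIn(composer)

	h, err := handler.NewHandler(handler.Config{
		BasePath:      "/files/",
		StoreComposer: composer,
		// The flags added in this PR map directly onto these fields:
		// download (GET) and delete (DELETE) requests are refused when set.
		DisableDownload: true,
		DisableDelete:   true,
	})
	if err != nil {
		log.Fatal(err)
	}

	http.Handle("/files/", http.StripPrefix("/files/", h))
	log.Fatal(http.ListenAndServe(":1080", nil))
}
```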
2 changes: 1 addition & 1 deletion pkg/handler/cors_test.go
@@ -40,7 +40,7 @@ func TestCORS(t *testing.T) {
ReqHeader: map[string]string{
"Origin": "tus.io",
},
Code: http.StatusMethodNotAllowed,
Code: http.StatusNotFound,
ResHeader: map[string]string{
"Access-Control-Expose-Headers": "Upload-Offset, Location, Upload-Length, Tus-Version, Tus-Resumable, Tus-Max-Size, Tus-Extension, Upload-Metadata, Upload-Defer-Length, Upload-Concat",
"Access-Control-Allow-Origin": "tus.io",
60 changes: 46 additions & 14 deletions pkg/handler/handler.go
@@ -2,14 +2,14 @@ package handler

import (
"net/http"

"github.com/bmizerany/pat"
"strings"
)

// Handler is a ready to use handler with routing (using pat)
type Handler struct {
*UnroutedHandler
http.Handler
allowedMethods []string
}

// NewHandler creates a routed tus protocol handler. This is the simplest
@@ -24,28 +24,60 @@ func NewHandler(config Config) (*Handler, error) {
return nil, err
}

handler, err := NewUnroutedHandler(config)
unroutedHandler, err := NewUnroutedHandler(config)
if err != nil {
return nil, err
}

allowed := []string{http.MethodPost, http.MethodHead, http.MethodPatch}
if config.StoreComposer.UsesTerminater && !config.DisableDelete {
allowed = append(allowed, http.MethodDelete)
}
if !config.DisableDownload {
allowed = append(allowed, http.MethodGet)
}
routedHandler := &Handler{
UnroutedHandler: handler,
UnroutedHandler: unroutedHandler,
allowedMethods: allowed,
}

mux := pat.New()
// This custom router exists only so that the surrounding code does not have to change after dropping https://github.com/bmizerany/pat
routedHandler.Handler = unroutedHandler.Middleware(&router{routedHandler})

routedHandler.Handler = handler.Middleware(mux)
return routedHandler, nil
}

mux.Post("", http.HandlerFunc(handler.PostFile))
mux.Head(":id", http.HandlerFunc(handler.HeadFile))
mux.Add("PATCH", ":id", http.HandlerFunc(handler.PatchFile))
mux.Get(":id", http.HandlerFunc(handler.GetFile))
type router struct {
routedHandler *Handler
}

// Only attach the DELETE handler if the Terminate() method is provided
if config.StoreComposer.UsesTerminater {
mux.Del(":id", http.HandlerFunc(handler.DelFile))
func (router *router) ServeHTTP(w http.ResponseWriter, r *http.Request) {
switch r.Method {
case http.MethodPost:
http.HandlerFunc(router.routedHandler.UnroutedHandler.PostFile).ServeHTTP(w, r)
case http.MethodHead:
http.HandlerFunc(router.routedHandler.UnroutedHandler.HeadFile).ServeHTTP(w, r)
case http.MethodPatch:
http.HandlerFunc(router.routedHandler.UnroutedHandler.PatchFile).ServeHTTP(w, r)
case http.MethodGet:
if !router.routedHandler.config.DisableDownload {
http.HandlerFunc(router.routedHandler.UnroutedHandler.GetFile).ServeHTTP(w, r)
} else {
router.NotAllowed(w, r)
}
case http.MethodDelete:
if router.routedHandler.config.StoreComposer.UsesTerminater && !router.routedHandler.config.DisableDelete {
// Only attach the DELETE handler if the Terminate() method is provided
http.HandlerFunc(router.routedHandler.DelFile).ServeHTTP(w, r)
} else {
router.NotAllowed(w, r)
}
default:
router.NotAllowed(w, r)
}
}

return routedHandler, nil
func (router *router) NotAllowed(w http.ResponseWriter, r *http.Request) {
w.Header().Add("Allow", strings.Join(router.routedHandler.allowedMethods, ", "))
http.Error(w, "Method Not Allowed", http.StatusMethodNotAllowed)
}
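Since the pat-based mux is gone, routing now reduces to a plain switch on the request method. The following standalone sketch, independent of tusd and with made-up handlers, shows the same Allow/405 pattern in isolation:

```go
package main

import (
	"fmt"
	"log"
	"net/http"
	"strings"
)

// methodRouter dispatches on the HTTP method only, mirroring the pattern used
// in the handler above after dropping the pat dependency.
type methodRouter struct {
	handlers       map[string]http.HandlerFunc
	allowedMethods []string
}

func (m *methodRouter) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	if h, ok := m.handlers[r.Method]; ok {
		h(w, r)
		return
	}
	// Advertise the supported methods and reject everything else.
	w.Header().Set("Allow", strings.Join(m.allowedMethods, ", "))
	http.Error(w, "Method Not Allowed", http.StatusMethodNotAllowed)
}

func main() {
	router := &methodRouter{
		handlers: map[string]http.HandlerFunc{
			http.MethodPost: func(w http.ResponseWriter, r *http.Request) {
				fmt.Fprintln(w, "upload created")
			},
			http.MethodHead: func(w http.ResponseWriter, r *http.Request) {
				w.WriteHeader(http.StatusOK)
			},
		},
		allowedMethods: []string{http.MethodPost, http.MethodHead},
	}
	log.Fatal(http.ListenAndServe(":8080", router))
}
```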
46 changes: 33 additions & 13 deletions pkg/handler/unrouted_handler.go
@@ -18,12 +18,21 @@ import (
const UploadLengthDeferred = "1"

var (
reExtractFileID = regexp.MustCompile(`([^/]+)\/?$`)
reForwardedHost = regexp.MustCompile(`host=([^;]+)`)
reForwardedProto = regexp.MustCompile(`proto=(https?)`)
reMimeType = regexp.MustCompile(`^[a-z]+\/[a-z0-9\-\+\.]+$`)
)

// Regexp tests: https://regex101.com/r/dEqVSE/1
// TODO: strip the query string before matching
func getCustomFilepathIdRegexp(basepath string) *regexp.Regexp {
basepath = strings.Replace(basepath, "/", "", -1)
if basepath == "" {
return regexp.MustCompile(`\/?(.+)\/$|\/?(.+)\/?$|([^\n\/]+)\/?$`)
}
return regexp.MustCompile(`\/?` + basepath + `\/(.+)\/$|\/?` + basepath + `\/(.+)\/?$|([^\/]+)\/?$`)
}

// HTTPError represents an error with an additional status code attached
// which may be used when this error is sent in a HTTP response.
// See the net/http package for standardized status codes.
@@ -223,7 +232,8 @@ func (handler *UnroutedHandler) Middleware(h http.Handler) http.Handler {
if r.Method == "OPTIONS" {
// Preflight request
header.Add("Access-Control-Allow-Methods", "POST, GET, HEAD, PATCH, DELETE, OPTIONS")
header.Add("Access-Control-Allow-Headers", "Authorization, Origin, X-Requested-With, X-Request-ID, X-HTTP-Method-Override, Content-Type, Upload-Length, Upload-Offset, Tus-Resumable, Upload-Metadata, Upload-Defer-Length, Upload-Concat")
// TODO: add headers from Flags.HttpHooksForwardHeaders and others
header.Add("Access-Control-Allow-Headers", "Authorization, Cookie, Origin, X-Requested-With, X-Request-ID, X-HTTP-Method-Override, Content-Type, Upload-Length, Upload-Offset, Tus-Resumable, Upload-Metadata, Upload-Defer-Length, Upload-Concat")
header.Set("Access-Control-Max-Age", "86400")

} else {
@@ -238,6 +248,9 @@ func (handler *UnroutedHandler) Middleware(h http.Handler) http.Handler {
// Add nosniff to all responses https://golang.org/src/net/http/server.go#L1429
header.Set("X-Content-Type-Options", "nosniff")

// https://github.com/tus/tusd/issues/450#issuecomment-765392832
header.Set("Access-Control-Allow-Credentials", "true")

// Set appropriated headers in case of OPTIONS method allowing protocol
// discovery and end with an 204 No Content
if r.Method == "OPTIONS" {
@@ -290,7 +303,7 @@ func (handler *UnroutedHandler) PostFile(w http.ResponseWriter, r *http.Request)
}

// Parse Upload-Concat header
isPartial, isFinal, partialUploadIDs, err := parseConcat(concatHeader)
isPartial, isFinal, partialUploadIDs, err := handler.parseConcat(concatHeader)
if err != nil {
handler.sendError(w, r, err)
return
@@ -420,7 +433,7 @@ func (handler *UnroutedHandler) PostFile(w http.ResponseWriter, r *http.Request)
func (handler *UnroutedHandler) HeadFile(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()

id, err := extractIDFromPath(r.URL.Path)
id, err := handler.extractIDFromPath(r.RequestURI)
if err != nil {
handler.sendError(w, r, err)
return
@@ -498,7 +511,7 @@ func (handler *UnroutedHandler) PatchFile(w http.ResponseWriter, r *http.Request
return
}

id, err := extractIDFromPath(r.URL.Path)
id, err := handler.extractIDFromPath(r.RequestURI)
if err != nil {
handler.sendError(w, r, err)
return
@@ -713,7 +726,7 @@ func (handler *UnroutedHandler) finishUploadIfComplete(ctx context.Context, uplo
func (handler *UnroutedHandler) GetFile(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()

id, err := extractIDFromPath(r.URL.Path)
id, err := handler.extractIDFromPath(r.RequestURI)
if err != nil {
handler.sendError(w, r, err)
return
@@ -840,7 +853,7 @@ func (handler *UnroutedHandler) DelFile(w http.ResponseWriter, r *http.Request)
return
}

id, err := extractIDFromPath(r.URL.Path)
id, err := handler.extractIDFromPath(r.RequestURI)
if err != nil {
handler.sendError(w, r, err)
return
@@ -1178,7 +1191,7 @@ func SerializeMetadataHeader(meta map[string]string) string {
// Parse the Upload-Concat header, e.g.
// Upload-Concat: partial
// Upload-Concat: final;http://tus.io/files/a /files/b/
func parseConcat(header string) (isPartial bool, isFinal bool, partialUploads []string, err error) {
func (handler *UnroutedHandler) parseConcat(header string) (isPartial bool, isFinal bool, partialUploads []string, err error) {
if len(header) == 0 {
return
}
Expand All @@ -1199,7 +1212,7 @@ func parseConcat(header string) (isPartial bool, isFinal bool, partialUploads []
continue
}

id, extractErr := extractIDFromPath(value)
id, extractErr := handler.extractIDFromPath(value)
if extractErr != nil {
err = extractErr
return
Expand All @@ -1219,12 +1232,19 @@ func parseConcat(header string) (isPartial bool, isFinal bool, partialUploads []
}

// extractIDFromPath pulls the last segment from the url provided
func extractIDFromPath(url string) (string, error) {
result := reExtractFileID.FindStringSubmatch(url)
if len(result) != 2 {
// or, if AllowCustomFilepath is enabled, everything after the BasePath (without the trailing slash)
func (handler *UnroutedHandler) extractIDFromPath(url string) (string, error) {
result := getCustomFilepathIdRegexp(handler.basePath).FindStringSubmatch(url)
// FindStringSubmatch returns the full match plus the 3 capturing groups, i.e. 4 entries
if len(result) < 4 {
return "", ErrNotFound
}
return result[1], nil
for i := 1; i <= 4; i++ {
if result[i] != "" {
return result[i], nil
}
}
return "", ErrNotFound
}

func i64toa(num int64) string {
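To make the new ID extraction concrete, here is a small standalone sketch that copies the pattern from getCustomFilepathIdRegexp and the submatch loop from extractIDFromPath, then runs them against two made-up request paths:

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

// customFilepathIDRegexp copies the pattern built above so it can be
// exercised in isolation.
func customFilepathIDRegexp(basepath string) *regexp.Regexp {
	basepath = strings.ReplaceAll(basepath, "/", "")
	if basepath == "" {
		return regexp.MustCompile(`\/?(.+)\/$|\/?(.+)\/?$|([^\n\/]+)\/?$`)
	}
	return regexp.MustCompile(`\/?` + basepath + `\/(.+)\/$|\/?` + basepath + `\/(.+)\/?$|([^\/]+)\/?$`)
}

// extractID returns the first non-empty capturing group, like extractIDFromPath.
func extractID(basepath, requestURI string) string {
	result := customFilepathIDRegexp(basepath).FindStringSubmatch(requestURI)
	for i := 1; i < len(result); i++ {
		if result[i] != "" {
			return result[i]
		}
	}
	return ""
}

func main() {
	// A plain generated upload ID keeps its old behaviour.
	fmt.Println(extractID("/files/", "/files/1881febb4343e9b806cad2e676989c0d"))
	// -> 1881febb4343e9b806cad2e676989c0d

	// A nested custom path is returned without the base path prefix.
	fmt.Println(extractID("/files/", "/files/avatars/2022/my_image.png"))
	// -> avatars/2022/my_image.png
}
```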