fix: drop modify permission for uploading new file (#5270)

Ramires Viana 2025-07-13 03:16:01 -03:00 committed by GitHub
parent 7c716862c1
commit 0f27c91eca
7 changed files with 197 additions and 66 deletions


@@ -3,7 +3,6 @@ import { baseURL, tusEndpoint, tusSettings } from "@/utils/constants";
import { useAuthStore } from "@/stores/auth";
import { useUploadStore } from "@/stores/upload";
import { removePrefix } from "@/api/utils";
-import { fetchURL } from "./utils";
const RETRY_BASE_DELAY = 1000;
const RETRY_MAX_DELAY = 20000;
@@ -28,8 +27,6 @@ export async function upload(
filePath = removePrefix(filePath);
const resourcePath = `${tusEndpoint}${filePath}?override=${overwrite}`;
-await createUpload(resourcePath);
const authStore = useAuthStore();
// Exit early because of typescript, tus content can't be a string
@@ -38,7 +35,7 @@ export async function upload(
}
return new Promise<void | string>((resolve, reject) => {
const upload = new tus.Upload(content, {
-uploadUrl: `${baseURL}${resourcePath}`,
+endpoint: `${baseURL}${resourcePath}`,
chunkSize: tusSettings.chunkSize,
retryDelays: computeRetryDelays(tusSettings),
parallelUploads: 1,
@@ -46,6 +43,18 @@ export async function upload(
headers: {
"X-Auth": authStore.jwt,
},
+onShouldRetry: function (err) {
+  const status = err.originalResponse
+    ? err.originalResponse.getStatus()
+    : 0;
+  // Do not retry for file conflict.
+  if (status === 409) {
+    return false;
+  }
+  return true;
+},
onError: function (error) {
if (CURRENT_UPLOAD_LIST[filePath].interval) {
clearInterval(CURRENT_UPLOAD_LIST[filePath].interval);
@@ -92,17 +101,6 @@ export async function upload(
});
}
-async function createUpload(resourcePath: string) {
-  const headResp = await fetchURL(resourcePath, {
-    method: "POST",
-  });
-  if (headResp.status !== 201) {
-    throw new Error(
-      `Failed to create an upload: ${headResp.status} ${headResp.statusText}`
-    );
-  }
-}
function computeRetryDelays(tusSettings: TusSettings): number[] | undefined {
if (!tusSettings.retryCount || tusSettings.retryCount < 1) {
// Disable retries altogether
@@ -130,7 +128,8 @@ function isTusSupported() {
return tus.isSupported === true;
}
-function computeETA(state: ETAState, speed?: number) {
+function computeETA(speed?: number) {
+  const state = useUploadStore();
if (state.speedMbyte === 0) {
return Infinity;
}
@@ -138,22 +137,13 @@ function computeETA(state: ETAState, speed?: number) {
(acc: number, size: number) => acc + size,
0
);
-const uploadedSize = state.progress.reduce(
-  (acc: number, progress: Progress) => {
-    if (typeof progress === "number") {
-      return acc + progress;
-    }
-    return acc;
-  },
-  0
-);
+const uploadedSize = state.progress.reduce((a, b) => a + b, 0);
const remainingSize = totalSize - uploadedSize;
const speedBytesPerSecond = (speed ?? state.speedMbyte) * 1024 * 1024;
return remainingSize / speedBytesPerSecond;
}
function computeGlobalSpeedAndETA() {
-const uploadStore = useUploadStore();
let totalSpeed = 0;
let totalCount = 0;
@@ -165,7 +155,7 @@ function computeGlobalSpeedAndETA() {
if (totalCount === 0) return { speed: 0, eta: Infinity };
const averageSpeed = totalSpeed / totalCount;
-const averageETA = computeETA(uploadStore, averageSpeed);
+const averageETA = computeETA(averageSpeed);
return { speed: averageSpeed, eta: averageETA };
}
@@ -207,6 +197,9 @@ export function abortAllUploads() {
}
if (CURRENT_UPLOAD_LIST[filePath].upload) {
CURRENT_UPLOAD_LIST[filePath].upload.abort(true);
+CURRENT_UPLOAD_LIST[filePath].upload.options!.onError!(
+  new Error("Upload aborted")
+);
}
delete CURRENT_UPLOAD_LIST[filePath];
}


@@ -30,7 +30,7 @@ export const useUploadStore = defineStore("upload", {
state: (): {
id: number;
sizes: number[];
-progress: Progress[];
+progress: number[];
queue: UploadItem[];
uploads: Uploads;
speedMbyte: number;
@@ -54,9 +54,7 @@ export const useUploadStore = defineStore("upload", {
}
const totalSize = state.sizes.reduce((a, b) => a + b, 0);
-// TODO: this looks ugly but it works with ts now
-const sum = state.progress.reduce((acc, val) => +acc + +val) as number;
+const sum = state.progress.reduce((a, b) => a + b, 0);
return Math.ceil((sum / totalSize) * 100);
},
getProgressDecimal: (state) => {
@@ -65,21 +63,14 @@ export const useUploadStore = defineStore("upload", {
}
const totalSize = state.sizes.reduce((a, b) => a + b, 0);
-// TODO: this looks ugly but it works with ts now
-const sum = state.progress.reduce((acc, val) => +acc + +val) as number;
+const sum = state.progress.reduce((a, b) => a + b, 0);
return ((sum / totalSize) * 100).toFixed(2);
},
getTotalProgressBytes: (state) => {
if (state.progress.length === 0 || state.sizes.length === 0) {
return "0 Bytes";
}
-const sum = state.progress.reduce(
-  (sum, p, i) =>
-    (sum as number) +
-    (typeof p === "number" ? p : p ? state.sizes[i] : 0),
-  0
-) as number;
+const sum = state.progress.reduce((a, b) => a + b, 0);
return formatSize(sum);
},
getTotalSize: (state) => {
@@ -104,7 +95,7 @@ export const useUploadStore = defineStore("upload", {
const isDir = upload.file.isDir;
const progress = isDir
? 100
-: Math.ceil(((state.progress[id] as number) / size) * 100);
+: Math.ceil((state.progress[id] / size) * 100);
files.push({
id,
@@ -124,7 +115,7 @@ export const useUploadStore = defineStore("upload", {
},
actions: {
// no context as first argument, use `this` instead
-setProgress({ id, loaded }: { id: number; loaded: Progress }) {
+setProgress({ id, loaded }: { id: number; loaded: number }) {
this.progress[id] = loaded;
},
setError(error: Error) {
@@ -168,7 +159,7 @@ export const useUploadStore = defineStore("upload", {
this.processUploads();
},
finishUpload(item: UploadItem) {
-this.setProgress({ id: item.id, loaded: item.file.size > 0 });
+this.setProgress({ id: item.id, loaded: item.file.size });
this.removeJob(item.id);
this.processUploads();
},


@@ -28,8 +28,6 @@ interface UploadEntry {
type UploadList = UploadEntry[];
-type Progress = number | boolean;
type CurrentUploadList = {
[key: string]: {
upload: import("tus-js-client").Upload;
@@ -43,9 +41,3 @@ type CurrentUploadList = {
interval: number | undefined;
};
};
-interface ETAState {
-  sizes: number[];
-  progress: Progress[];
-  speedMbyte: number;
-}

go.mod

@@ -53,6 +53,7 @@ require (
github.com/hashicorp/go-multierror v1.1.1 // indirect
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
+github.com/jellydator/ttlcache/v3 v3.4.0 // indirect
github.com/klauspost/compress v1.18.0 // indirect
github.com/klauspost/pgzip v1.2.6 // indirect
github.com/mikelolasagasti/xz v1.0.1 // indirect
@@ -71,6 +72,7 @@ require (
go.uber.org/multierr v1.11.0 // indirect
go4.org v0.0.0-20230225012048-214862532bf5 // indirect
golang.org/x/net v0.41.0 // indirect
+golang.org/x/sync v0.15.0 // indirect
golang.org/x/sys v0.33.0 // indirect
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect

go.sum

@@ -150,6 +150,8 @@ github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyf
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
+github.com/jellydator/ttlcache/v3 v3.4.0 h1:YS4P125qQS0tNhtL6aeYkheEaB/m8HCqdMMP4mnWdTY=
+github.com/jellydator/ttlcache/v3 v3.4.0/go.mod h1:Hw9EgjymziQD3yGsQdf1FqFdpp7YjFMd4Srg5EJlgD4=
github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c2EWnYs/m4=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=


@@ -69,7 +69,7 @@ func NewHandler(
api.PathPrefix("/tus").Handler(monkey(tusPostHandler(), "/api/tus")).Methods("POST")
api.PathPrefix("/tus").Handler(monkey(tusHeadHandler(), "/api/tus")).Methods("HEAD", "GET")
api.PathPrefix("/tus").Handler(monkey(tusPatchHandler(), "/api/tus")).Methods("PATCH")
-api.PathPrefix("/tus").Handler(monkey(resourceDeleteHandler(fileCache), "/api/tus")).Methods("DELETE")
+api.PathPrefix("/tus").Handler(monkey(tusDeleteHandler(), "/api/tus")).Methods("DELETE")
api.PathPrefix("/usage").Handler(monkey(diskUsage, "/api/usage")).Methods("GET")


@@ -1,6 +1,7 @@
package http
import (
+"context"
"errors"
"fmt"
"io"
@@ -8,14 +9,76 @@
"os"
"path/filepath"
"strconv"
+"time"
+"github.com/jellydator/ttlcache/v3"
"github.com/spf13/afero"
"github.com/filebrowser/filebrowser/v2/files"
)
+const maxUploadWait = 3 * time.Minute
+// Tracks active uploads along with their respective upload lengths
+var activeUploads = initActiveUploads()
+func initActiveUploads() *ttlcache.Cache[string, int64] {
+  cache := ttlcache.New[string, int64]()
+  cache.OnEviction(func(_ context.Context, reason ttlcache.EvictionReason, item *ttlcache.Item[string, int64]) {
+    if reason == ttlcache.EvictionReasonExpired {
+      fmt.Printf("deleting incomplete upload file: \"%s\"", item.Key())
+      os.Remove(item.Key())
+    }
+  })
+  go cache.Start()
+  return cache
+}
+func registerUpload(filePath string, fileSize int64) {
+  activeUploads.Set(filePath, fileSize, maxUploadWait)
+}
+func completeUpload(filePath string) {
+  activeUploads.Delete(filePath)
+}
+func getActiveUploadLength(filePath string) (int64, error) {
+  item := activeUploads.Get(filePath)
+  if item == nil {
+    return 0, fmt.Errorf("no active upload found for the given path")
+  }
+  return item.Value(), nil
+}
+func keepUploadActive(filePath string) func() {
+  stop := make(chan bool)
+  go func() {
+    ticker := time.NewTicker(2 * time.Second)
+    defer ticker.Stop()
+    for {
+      select {
+      case <-stop:
+        return
+      case <-ticker.C:
+        activeUploads.Touch(filePath)
+      }
+    }
+  }()
+  return func() {
+    close(stop)
+  }
+}
func tusPostHandler() handleFunc {
-return withUser(func(_ http.ResponseWriter, r *http.Request, d *data) (int, error) {
+return withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
+if !d.user.Perm.Create || !d.Check(r.URL.Path) {
+  return http.StatusForbidden, nil
+}
file, err := files.NewFileInfo(&files.FileOptions{
Fs: d.user.Fs,
Path: r.URL.Path,
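The new helpers lean on jellydator/ttlcache: each upload path is cached together with its declared length for maxUploadWait, refreshed while data keeps arriving, and cleaned up when the TTL runs out. Below is a self-contained sketch of that lifecycle using the same ttlcache calls; the path, size, and deliberately short TTL are illustrative, and it only logs on expiry instead of deleting a partial file the way the handler's eviction callback does.

```go
package main

import (
	"context"
	"fmt"
	"time"

	"github.com/jellydator/ttlcache/v3"
)

func main() {
	// Cache keyed by upload path, storing the declared Upload-Length.
	cache := ttlcache.New[string, int64]()

	// Fires for every eviction; only expired entries count as abandoned uploads.
	cache.OnEviction(func(_ context.Context, reason ttlcache.EvictionReason, item *ttlcache.Item[string, int64]) {
		if reason == ttlcache.EvictionReasonExpired {
			fmt.Printf("abandoned upload: %s (declared length %d)\n", item.Key(), item.Value())
		}
	})

	// Background goroutine that removes expired items.
	go cache.Start()
	defer cache.Stop()

	// Register an upload with a short TTL (the handler uses the 3-minute maxUploadWait).
	cache.Set("/uploads/demo.bin", 1024, 500*time.Millisecond)

	// While data keeps arriving, Touch resets the TTL so the entry survives.
	cache.Touch("/uploads/demo.bin")

	// Stop touching it and the eviction callback eventually runs.
	time.Sleep(time.Second)
}
```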
@@ -26,10 +89,6 @@ func tusPostHandler() handleFunc {
})
switch {
case errors.Is(err, afero.ErrFileNotFound):
-if !d.user.Perm.Create || !d.Check(r.URL.Path) {
-  return http.StatusForbidden, nil
-}
dirPath := filepath.Dir(r.URL.Path)
if _, statErr := d.user.Fs.Stat(dirPath); os.IsNotExist(statErr) {
if mkdirErr := d.user.Fs.MkdirAll(dirPath, files.PermDir); mkdirErr != nil {
@@ -41,25 +100,55 @@ func tusPostHandler() handleFunc {
}
fileFlags := os.O_CREATE | os.O_WRONLY
-if r.URL.Query().Get("override") == "true" {
-  fileFlags |= os.O_TRUNC
-}
// if file exists
if file != nil {
if file.IsDir {
return http.StatusBadRequest, fmt.Errorf("cannot upload to a directory %s", file.RealPath())
}
+// Existing files will remain untouched unless explicitly instructed to override
+if r.URL.Query().Get("override") != "true" {
+  return http.StatusConflict, nil
+}
+// Permission for overwriting the file
+if !d.user.Perm.Modify {
+  return http.StatusForbidden, nil
+}
+fileFlags |= os.O_TRUNC
}
openFile, err := d.user.Fs.OpenFile(r.URL.Path, fileFlags, files.PermFile)
if err != nil {
return errToStatus(err), err
}
-if err := openFile.Close(); err != nil {
+defer openFile.Close()
+file, err = files.NewFileInfo(&files.FileOptions{
+  Fs: d.user.Fs,
+  Path: r.URL.Path,
+  Modify: d.user.Perm.Modify,
+  Expand: false,
+  ReadHeader: false,
+  Checker: d,
+  Content: false,
+})
+if err != nil {
return errToStatus(err), err
}
+uploadLength, err := getUploadLength(r)
+if err != nil {
+  return http.StatusBadRequest, fmt.Errorf("invalid upload length: %w", err)
+}
+// Enables the user to utilize the PATCH endpoint for uploading file data
+registerUpload(file.RealPath(), uploadLength)
+w.Header().Set("Location", "/api/tus/"+r.URL.Path)
return http.StatusCreated, nil
})
}
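Taken together, the reworked POST handler reduces to a small decision table: a new file needs only the create permission, an existing file is left untouched and answered with 409 Conflict unless override=true is passed, and an override additionally requires the modify permission. The sketch below restates just that branching; the perms struct and uploadStatus helper are simplified stand-ins for illustration, not the project's types.

```go
package main

import (
	"fmt"
	"net/http"
)

// perms is a simplified stand-in for the user's permission flags.
type perms struct {
	Create bool
	Modify bool
}

// uploadStatus mirrors the handler's branching: create vs. conflict vs. override.
func uploadStatus(p perms, fileExists, override bool) int {
	if !p.Create {
		return http.StatusForbidden // creating uploads always needs the create permission
	}
	if fileExists {
		if !override {
			return http.StatusConflict // existing files stay untouched unless override=true
		}
		if !p.Modify {
			return http.StatusForbidden // overwriting additionally needs the modify permission
		}
	}
	return http.StatusCreated
}

func main() {
	fmt.Println(uploadStatus(perms{Create: true}, false, false))             // 201: new file
	fmt.Println(uploadStatus(perms{Create: true}, true, false))              // 409: exists, no override
	fmt.Println(uploadStatus(perms{Create: true}, true, true))               // 403: override without modify
	fmt.Println(uploadStatus(perms{Create: true, Modify: true}, true, true)) // 201: override with modify
}
```

A real request still goes through d.Check and errToStatus as well; this only isolates the permission and override branching.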
@@ -67,7 +156,7 @@ func tusPostHandler() handleFunc {
func tusHeadHandler() handleFunc {
return withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
w.Header().Set("Cache-Control", "no-store")
-if !d.Check(r.URL.Path) {
+if !d.user.Perm.Create || !d.Check(r.URL.Path) {
return http.StatusForbidden, nil
}
@@ -83,8 +172,13 @@ func tusHeadHandler() handleFunc {
return errToStatus(err), err
}
+uploadLength, err := getActiveUploadLength(file.RealPath())
+if err != nil {
+  return http.StatusNotFound, err
+}
w.Header().Set("Upload-Offset", strconv.FormatInt(file.Size, 10))
-w.Header().Set("Upload-Length", "-1")
+w.Header().Set("Upload-Length", strconv.FormatInt(uploadLength, 10))
return http.StatusOK, nil
})
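Because HEAD now returns the real Upload-Length of the registered upload instead of -1, a resuming client can learn both the current offset and the expected total from a single request. A rough standard-library sketch of such a check; the host, file path, and token are placeholders, while the X-Auth header and /api/tus/ prefix come from the diff above.

```go
package main

import (
	"fmt"
	"log"
	"net/http"
	"strconv"
)

func main() {
	// Assumed endpoint and credentials; adjust to your File Browser instance.
	req, err := http.NewRequest(http.MethodHead, "http://localhost:8080/api/tus/docs/report.pdf", nil)
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Set("X-Auth", "<jwt>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		log.Fatalf("no resumable upload here: %s", resp.Status)
	}

	// Both headers are set by the new tusHeadHandler.
	offset, _ := strconv.ParseInt(resp.Header.Get("Upload-Offset"), 10, 64)
	length, _ := strconv.ParseInt(resp.Header.Get("Upload-Length"), 10, 64)
	fmt.Printf("resume at byte %d of %d\n", offset, length)
}
```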
@@ -92,7 +186,7 @@ func tusHeadHandler() handleFunc {
func tusPatchHandler() handleFunc {
return withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
-if !d.user.Perm.Modify || !d.Check(r.URL.Path) {
+if !d.user.Perm.Create || !d.Check(r.URL.Path) {
return http.StatusForbidden, nil
}
if r.Header.Get("Content-Type") != "application/offset+octet-stream" {
@@ -101,7 +195,7 @@ func tusPatchHandler() handleFunc {
uploadOffset, err := getUploadOffset(r)
if err != nil {
-return http.StatusBadRequest, fmt.Errorf("invalid upload offset: %w", err)
+return http.StatusBadRequest, fmt.Errorf("invalid upload offset")
}
file, err := files.NewFileInfo(&files.FileOptions{
@@ -120,6 +214,15 @@ func tusPatchHandler() handleFunc {
return errToStatus(err), err
}
+uploadLength, err := getActiveUploadLength(file.RealPath())
+if err != nil {
+  return http.StatusNotFound, err
+}
+// Prevent the upload from being evicted during the transfer
+stop := keepUploadActive(file.RealPath())
+defer stop()
switch {
case file.IsDir:
return http.StatusBadRequest, fmt.Errorf("cannot upload to a directory %s", file.RealPath())
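keepUploadActive is a plain ticker-plus-stop-channel pattern: a goroutine refreshes the cache entry every two seconds until the returned stop function is called, and the handler defers that call for the duration of the PATCH. The standalone sketch below keeps the same shape but swaps the cache refresh for a counter so it runs without the rest of the server; the intervals and names are illustrative.

```go
package main

import (
	"fmt"
	"sync/atomic"
	"time"
)

// keepAlive runs fn on every tick until the returned stop function is called.
func keepAlive(interval time.Duration, fn func()) func() {
	stop := make(chan struct{})
	go func() {
		ticker := time.NewTicker(interval)
		defer ticker.Stop()
		for {
			select {
			case <-stop:
				return
			case <-ticker.C:
				fn()
			}
		}
	}()
	return func() { close(stop) }
}

func main() {
	var refreshes int64

	// In the handler this would call activeUploads.Touch(filePath) every 2 seconds.
	stop := keepAlive(2*time.Second, func() { atomic.AddInt64(&refreshes, 1) })

	time.Sleep(5 * time.Second) // simulate a chunk being written
	stop()                      // the handler defers this

	fmt.Printf("entry was refreshed %d times while the chunk was streaming\n", atomic.LoadInt64(&refreshes))
}
```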
@@ -148,12 +251,60 @@ func tusPatchHandler() handleFunc {
return http.StatusInternalServerError, fmt.Errorf("could not write to file: %w", err)
}
-w.Header().Set("Upload-Offset", strconv.FormatInt(uploadOffset+bytesWritten, 10))
+newOffset := uploadOffset + bytesWritten
+w.Header().Set("Upload-Offset", strconv.FormatInt(newOffset, 10))
+if newOffset >= uploadLength {
+  completeUpload(file.RealPath())
+  _ = d.RunHook(func() error { return nil }, "upload", r.URL.Path, "", d.user)
+}
return http.StatusNoContent, nil
})
}
+func tusDeleteHandler() handleFunc {
+  return withUser(func(_ http.ResponseWriter, r *http.Request, d *data) (int, error) {
+    if r.URL.Path == "/" || !d.user.Perm.Create {
+      return http.StatusForbidden, nil
+    }
+    file, err := files.NewFileInfo(&files.FileOptions{
+      Fs: d.user.Fs,
+      Path: r.URL.Path,
+      Modify: d.user.Perm.Modify,
+      Expand: false,
+      ReadHeader: d.server.TypeDetectionByHeader,
+      Checker: d,
+    })
+    if err != nil {
+      return errToStatus(err), err
+    }
+    _, err = getActiveUploadLength(file.RealPath())
+    if err != nil {
+      return http.StatusNotFound, err
+    }
+    err = d.user.Fs.RemoveAll(r.URL.Path)
+    if err != nil {
+      return errToStatus(err), err
+    }
+    completeUpload(file.RealPath())
+    return http.StatusNoContent, nil
+  })
+}
+func getUploadLength(r *http.Request) (int64, error) {
+  uploadOffset, err := strconv.ParseInt(r.Header.Get("Upload-Length"), 10, 64)
+  if err != nil {
+    return 0, fmt.Errorf("invalid upload length: %w", err)
+  }
+  return uploadOffset, nil
+}
func getUploadOffset(r *http.Request) (int64, error) {
uploadOffset, err := strconv.ParseInt(r.Header.Get("Upload-Offset"), 10, 64)
if err != nil {
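End to end, the handlers above are enough for a plain HTTP client to drive an upload: POST declares the Upload-Length and may answer 409 Conflict for an existing file, PATCH streams data with the application/offset+octet-stream content type and an Upload-Offset, and DELETE cancels a registered upload. A rough walkthrough under assumed values (local instance, placeholder path and token); the headers and status codes are the ones used by the handlers.

```go
package main

import (
	"bytes"
	"fmt"
	"log"
	"net/http"
)

const (
	base = "http://localhost:8080/api/tus/backups/data.bin" // assumed instance and path
	jwt  = "<jwt>"                                          // assumed token
)

func do(req *http.Request) *http.Response {
	req.Header.Set("X-Auth", jwt)
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	resp.Body.Close()
	return resp
}

func main() {
	payload := []byte("hello, tus")

	// 1. Create the upload; the handler registers it with the declared Upload-Length.
	post, _ := http.NewRequest(http.MethodPost, base, nil)
	post.Header.Set("Upload-Length", fmt.Sprint(len(payload)))
	if resp := do(post); resp.StatusCode == http.StatusConflict {
		log.Fatal("file already exists; retry with ?override=true (needs modify permission)")
	}

	// 2. Upload the data; a real client loops over chunks and tracks Upload-Offset.
	patch, _ := http.NewRequest(http.MethodPatch, base, bytes.NewReader(payload))
	patch.Header.Set("Content-Type", "application/offset+octet-stream")
	patch.Header.Set("Upload-Offset", "0")
	fmt.Println("PATCH:", do(patch).Status) // 204 once the declared length is reached

	// 3. (Optional) abort an in-flight upload instead of finishing it.
	// del, _ := http.NewRequest(http.MethodDelete, base, nil)
	// do(del)
}
```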