first commit
maher
2025-10-29 11:42:25 +01:00
commit 703f50a09d
4595 changed files with 385164 additions and 0 deletions

create-file-upload.ts

@@ -0,0 +1,18 @@
import {UploadedFile} from '../uploaded-file';
import {UploadStrategyConfig} from './strategy/upload-strategy';
import {FileUpload} from './file-upload-store';
export function createUpload(
file: UploadedFile | File,
options?: UploadStrategyConfig
): FileUpload {
const uploadedFile =
file instanceof UploadedFile ? file : new UploadedFile(file);
return {
file: uploadedFile,
percentage: 0,
bytesUploaded: 0,
status: 'pending',
options: options || {},
};
}
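
Usage sketch (illustrative, not part of the commit): createUpload only builds the queue item; nothing starts uploading until the store's runQueue picks it up.

import {createUpload} from './create-file-upload';

// hypothetical plain browser File
const file = new File(['hello'], 'hello.txt', {type: 'text/plain'});
const upload = createUpload(file, {
  onSuccess: entry => console.log('entry created', entry.id),
});
// upload.status === 'pending', upload.percentage === 0,
// upload.file is the UploadedFile wrapper around `file`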

file-upload-provider.tsx

@@ -0,0 +1,47 @@
import {StoreApi, useStore} from 'zustand';
import {createContext, ReactNode, useContext, useState} from 'react';
import {createFileUploadStore, FileUploadState} from './file-upload-store';
import {useSettings} from '../../core/settings/use-settings';
const FileUploadContext = createContext<StoreApi<FileUploadState>>(null!);
type ExtractState<S> = S extends {
getState: () => infer T;
}
? T
: never;
type UseFileUploadStore = {
(): ExtractState<StoreApi<FileUploadState>>;
<U>(
selector: (state: ExtractState<StoreApi<FileUploadState>>) => U,
equalityFn?: (a: U, b: U) => boolean
): U;
};
// @ts-ignore - implementation can't be typed to satisfy both overloads of UseFileUploadStore
export const useFileUploadStore: UseFileUploadStore = (
selector,
equalityFn
) => {
const store = useContext(FileUploadContext);
return useStore(store, selector, equalityFn);
};
interface FileUploadProviderProps {
children: ReactNode;
}
export function FileUploadProvider({children}: FileUploadProviderProps) {
const settings = useSettings();
// lazily create the store object only once per provider instance
const [store] = useState(() => {
return createFileUploadStore({settings});
});
return (
<FileUploadContext.Provider value={store as StoreApi<FileUploadState>}>
{children}
</FileUploadContext.Provider>
);
}
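
Usage sketch (illustrative): the provider creates one store per subtree, and the typed hook reads slices from it with a selector, so components only re-render for the slice they subscribe to.

import {FileUploadProvider, useFileUploadStore} from './file-upload-provider';

function UploadCounter() {
  // re-renders only when the count changes
  const activeCount = useFileUploadStore(s => s.activeUploadsCount);
  return <span>{activeCount} active uploads</span>;
}

export function App() {
  return (
    <FileUploadProvider>
      <UploadCounter />
    </FileUploadProvider>
  );
}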

file-upload-store.ts

@@ -0,0 +1,195 @@
import {create} from 'zustand';
import {immer} from 'zustand/middleware/immer';
import {Draft, enableMapSet} from 'immer';
import {UploadedFile} from '../uploaded-file';
import {UploadStrategy, UploadStrategyConfig} from './strategy/upload-strategy';
import {MessageDescriptor} from '../../i18n/message-descriptor';
import {FileEntry} from '../file-entry';
import {S3MultipartUpload} from './strategy/s3-multipart-upload';
import {Settings} from '../../core/settings/settings';
import {TusUpload} from './strategy/tus-upload';
import {ProgressTimeout} from './progress-timeout';
import {startUploading} from './start-uploading';
import {createUpload} from './create-file-upload';
enableMapSet();
export interface FileUpload {
file: UploadedFile;
percentage: number;
bytesUploaded: number;
status: 'pending' | 'inProgress' | 'aborted' | 'failed' | 'completed';
errorMessage?: string | MessageDescriptor | null;
entry?: FileEntry;
request?: UploadStrategy;
timer?: ProgressTimeout;
options: UploadStrategyConfig;
meta?: unknown;
}
interface State {
concurrency: number;
fileUploads: Map<string, FileUpload>;
// uploads with pending and inProgress status
activeUploadsCount: number;
completedUploadsCount: number;
}
const initialState: State = {
concurrency: 3,
fileUploads: new Map(),
activeUploadsCount: 0,
completedUploadsCount: 0,
};
interface Actions {
uploadMultiple: (
files: (File | UploadedFile)[] | FileList,
options?: Omit<
UploadStrategyConfig,
// excluded: these would fire for every upload in the batch at once
'onProgress' | 'showToastOnRestrictionFail'
>,
) => string[];
uploadSingle: (
file: File | UploadedFile,
options?: UploadStrategyConfig,
) => string;
clearInactive: () => void;
abortUpload: (id: string) => void;
updateFileUpload: (id: string, state: Partial<FileUpload>) => void;
getUpload: (id: string) => FileUpload | undefined;
runQueue: () => void;
reset: () => void;
}
export type FileUploadState = State & Actions;
interface StoreProps {
settings: Settings;
}
export const createFileUploadStore = ({settings}: StoreProps) =>
create<FileUploadState>()(
immer((set, get) => {
return {
...initialState,
reset: () => {
set(initialState);
},
getUpload: uploadId => {
return get().fileUploads.get(uploadId);
},
clearInactive: () => {
set(state => {
state.fileUploads.forEach((upload, key) => {
if (upload.status !== 'inProgress') {
state.fileUploads.delete(key);
}
});
});
get().runQueue();
},
abortUpload: id => {
const upload = get().fileUploads.get(id);
if (upload) {
upload.request?.abort();
get().updateFileUpload(id, {status: 'aborted', percentage: 0});
get().runQueue();
}
},
updateFileUpload: (id, newUploadState) => {
set(state => {
const fileUpload = state.fileUploads.get(id);
if (fileUpload) {
state.fileUploads.set(id, {
...fileUpload,
...newUploadState,
});
// totals only change when the status of an upload in the queue changes
if ('status' in newUploadState) {
updateTotals(state);
}
}
});
},
uploadSingle: (file, userOptions) => {
const upload = createUpload(file, userOptions);
const fileUploads = new Map(get().fileUploads);
fileUploads.set(upload.file.id, upload);
set(state => {
  state.fileUploads = fileUploads;
  updateTotals(state);
});
get().runQueue();
return upload.file.id;
},
uploadMultiple: (files, options) => {
// create file upload items from specified files
const uploads = new Map<string, FileUpload>(get().fileUploads);
[...files].forEach(file => {
const upload = createUpload(file, options);
uploads.set(upload.file.id, upload);
});
// set state only once; with thousands of files we don't want a rerender per file
set(state => {
  state.fileUploads = uploads;
  updateTotals(state);
});
get().runQueue();
return [...uploads.keys()];
},
runQueue: async () => {
const uploads = [...get().fileUploads.values()];
const activeUploads = uploads.filter(u => u.status === 'inProgress');
let concurrency = get().concurrency;
if (
activeUploads.filter(
activeUpload =>
// only upload one file from folder at a time to avoid creating duplicate folders
activeUpload.file.relativePath ||
// only allow one s3 multipart upload at a time, it will already upload multiple parts in parallel
activeUpload.request instanceof S3MultipartUpload ||
// only allow one tus upload if file is larger than chunk size, tus will have parallel uploads already in that case
(activeUpload.request instanceof TusUpload &&
settings.uploads.chunk_size &&
activeUpload.file.size > settings.uploads.chunk_size),
).length
) {
concurrency = 1;
}
if (activeUploads.length < concurrency) {
const next = uploads.find(u => u.status === 'pending');
if (next) {
await startUploading(next, get());
}
}
},
};
}),
);
const updateTotals = (state: Draft<FileUploadState>) => {
state.completedUploadsCount = [...state.fileUploads.values()].filter(
u => u.status === 'completed',
).length;
state.activeUploadsCount = [...state.fileUploads.values()].filter(
u => u.status === 'inProgress' || u.status === 'pending',
).length;
};
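
Usage sketch (illustrative, hypothetical settings and files): the store is framework-agnostic, so the queue can also be driven outside React.

const store = createFileUploadStore({settings});
const ids = store.getState().uploadMultiple(files, {
  metadata: {parentId: '42'}, // hypothetical parent folder id
});
// at most `concurrency` (3) uploads run at once; folder uploads, S3
// multipart uploads and chunked tus uploads drop the limit to 1
const unsubscribe = store.subscribe(state => {
  console.log(`${state.completedUploadsCount}/${ids.length} done`);
});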

progress-timeout.ts

@@ -0,0 +1,26 @@
export class ProgressTimeout {
public aliveTimer: ReturnType<typeof setTimeout> | undefined;
public isDone = false;
public timeout = 30000;
public timeoutHandler: (() => void) | null = null;
progress() {
// Some browsers fire another progress event when the upload is
// cancelled, so we have to ignore progress after the timer was
// told to stop.
if (this.isDone || !this.timeoutHandler) return;
if (this.timeout > 0) {
clearTimeout(this.aliveTimer);
this.aliveTimer = setTimeout(this.timeoutHandler, this.timeout);
}
}
done() {
if (!this.isDone) {
clearTimeout(this.aliveTimer);
this.aliveTimer = undefined;
this.isDone = true;
}
}
}
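
Usage sketch (illustrative): this is a stall watchdog rather than a request timeout; every progress event pushes the deadline back, so only 30 seconds of silence fires the handler.

const timer = new ProgressTimeout();
timer.timeoutHandler = () => console.warn('upload stalled for 30s');
// call on every progress event to reset the deadline:
timer.progress();
// call once on success/failure/abort so a pending timer can't fire later:
timer.done();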

start-uploading.ts

@@ -0,0 +1,129 @@
import {UploadStrategy, UploadStrategyConfig} from './strategy/upload-strategy';
import {UploadedFile} from '../uploaded-file';
import {Disk} from '../types/backend-metadata';
import {S3MultipartUpload} from './strategy/s3-multipart-upload';
import {S3Upload} from './strategy/s3-upload';
import {TusUpload} from './strategy/tus-upload';
import {AxiosUpload} from './strategy/axios-upload';
import {FileUpload, FileUploadState} from './file-upload-store';
import {validateUpload} from './validate-upload';
import {getBootstrapData} from '../../core/bootstrap-data/use-backend-bootstrap-data';
import {toast} from '../../ui/toast/toast';
import {ProgressTimeout} from './progress-timeout';
import {message} from '../../i18n/message';
export async function startUploading(
upload: FileUpload,
state: FileUploadState
): Promise<UploadStrategy | null> {
const settings = getBootstrapData().settings;
const options = upload.options;
const file = upload.file;
// validate file, if validation fails, error the upload and bail
if (options?.restrictions) {
const errorMessage = validateUpload(file, options.restrictions);
if (errorMessage) {
state.updateFileUpload(file.id, {
errorMessage,
status: 'failed',
request: undefined,
timer: undefined,
});
if (options.showToastOnRestrictionFail) {
toast.danger(errorMessage);
}
state.runQueue();
return null;
}
}
// prepare config for file upload strategy
const timer = new ProgressTimeout();
const config: UploadStrategyConfig = {
metadata: {
...options?.metadata,
relativePath: file.relativePath,
disk: options?.metadata?.disk || Disk.uploads,
parentId: options?.metadata?.parentId || '',
},
chunkSize: settings.uploads.chunk_size,
baseUrl: settings.base_url,
onError: errorMessage => {
state.updateFileUpload(file.id, {
errorMessage,
status: 'failed',
});
state.runQueue();
timer.done();
options?.onError?.(errorMessage, file);
},
onSuccess: entry => {
state.updateFileUpload(file.id, {
status: 'completed',
entry,
});
state.runQueue();
timer.done();
options?.onSuccess?.(entry, file);
},
onProgress: ({bytesUploaded, bytesTotal}) => {
const percentage = (bytesUploaded / bytesTotal) * 100;
state.updateFileUpload(file.id, {
percentage,
bytesUploaded,
});
timer.progress();
options?.onProgress?.({bytesUploaded, bytesTotal});
},
};
// choose and create upload strategy, based on file size and settings
const strategy = chooseUploadStrategy(file, config);
const request = await strategy.create(file, config);
// add handler for when upload times out (no progress for 30+ seconds)
timer.timeoutHandler = () => {
request.abort();
state.updateFileUpload(file.id, {
status: 'failed',
errorMessage: message('Upload timed out'),
});
state.runQueue();
};
state.updateFileUpload(file.id, {
status: 'inProgress',
request,
});
request.start();
return request;
}
const OneMB = 1024 * 1024;
const FourMB = 4 * OneMB;
const HundredMB = 100 * OneMB;
const chooseUploadStrategy = (
file: UploadedFile,
config: UploadStrategyConfig
) => {
const settings = getBootstrapData().settings;
const disk = config.metadata?.disk || Disk.uploads;
const driver =
disk === Disk.uploads
? settings.uploads.uploads_driver
: settings.uploads.public_driver;
if (driver?.endsWith('s3') && settings.uploads.s3_direct_upload) {
return file.size >= HundredMB ? S3MultipartUpload : S3Upload;
} else {
// files of 4MB and over use tus (unless disabled), smaller files use axios
return file.size >= FourMB && !settings.uploads.disable_tus
? TusUpload
: AxiosUpload;
}
};
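
Usage sketch (illustrative, hypothetical `someFile` and `store`): startUploading is normally invoked by the store's runQueue, but the flow can be exercised directly against a store instance.

const upload = createUpload(someFile, {
  onSuccess: entry => console.log('uploaded as', entry.id),
});
// validates, picks Axios/Tus/S3 based on size and settings, wires the
// 30-second stall watchdog, then starts the request
const request = await startUploading(upload, store.getState());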

strategy/axios-upload.ts

@@ -0,0 +1,71 @@
import {UploadedFile} from '../../uploaded-file';
import {UploadStrategy, UploadStrategyConfig} from './upload-strategy';
import {apiClient} from '@common/http/query-client';
import {getAxiosErrorMessage} from '@common/utils/http/get-axios-error-message';
import {AxiosProgressEvent} from 'axios';
export class AxiosUpload implements UploadStrategy {
private abortController: AbortController;
constructor(
private file: UploadedFile,
private config: UploadStrategyConfig,
) {
this.abortController = new AbortController();
}
async start() {
const formData = new FormData();
const {onSuccess, onError, onProgress, metadata} = this.config;
formData.set('file', this.file.native);
// NOTE: workspace id is hardcoded here
formData.set('workspaceId', `12`);
if (metadata) {
Object.entries(metadata).forEach(([key, value]) => {
formData.set(key, `${value}`);
});
}
const response = await apiClient
.post('file-entries', formData, {
onUploadProgress: (e: AxiosProgressEvent) => {
if (e.event.lengthComputable) {
onProgress?.({
bytesUploaded: e.loaded,
bytesTotal: e.total || 0,
});
}
},
signal: this.abortController.signal,
headers: {
'Content-Type': 'multipart/form-data',
},
})
.catch(err => {
if (err.code !== 'ERR_CANCELED') {
onError?.(getAxiosErrorMessage(err), this.file);
}
});
// if upload was aborted, it will be handled and set
// as "aborted" already, no need to set it as "failed"
if (this.abortController.signal.aborted) {
return;
}
if (response && response.data.fileEntry) {
onSuccess?.(response.data.fileEntry, this.file);
}
}
abort() {
this.abortController.abort();
return Promise.resolve();
}
static async create(
file: UploadedFile,
config: UploadStrategyConfig,
): Promise<AxiosUpload> {
return new AxiosUpload(file, config);
}
}
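
Usage sketch (illustrative, hypothetical file and metadata): every strategy shares the same create/start/abort contract, so callers like startUploading never care which transport is underneath.

const strategy = await AxiosUpload.create(uploadedFile, {
  metadata: {parentId: '42'}, // hypothetical
  onProgress: ({bytesUploaded, bytesTotal}) =>
    console.log(`${Math.round((bytesUploaded / bytesTotal) * 100)}%`),
  onSuccess: entry => console.log('created', entry.id),
  onError: message => console.error(message),
});
strategy.start();
// later, e.g. when the user cancels:
await strategy.abort();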

strategy/s3-multipart-upload.ts

@@ -0,0 +1,331 @@
import {UploadStrategy, UploadStrategyConfig} from './upload-strategy';
import {UploadedFile} from '../../uploaded-file';
import axios, {AxiosInstance, AxiosProgressEvent} from 'axios';
import {FileEntry} from '../../file-entry';
import {
getFromLocalStorage,
removeFromLocalStorage,
setInLocalStorage,
} from '@common/utils/hooks/local-storage';
import {apiClient} from '@common/http/query-client';
import {getAxiosErrorMessage} from '@common/utils/http/get-axios-error-message';
import axiosRetry from 'axios-retry';
const oneMB = 1024 * 1024;
// chunk size that will be uploaded to s3 per request
const desiredChunkSize = 20 * oneMB;
// how many urls should be pre-signed per call to backend
const batchSize = 10;
// number of concurrent requests to s3 api
const concurrency = 5;
interface ChunkState {
blob: Blob | File;
done: boolean;
etag?: string;
partNumber: number;
bytesUploaded: number;
}
interface SignedUrl {
url: string;
partNumber: number;
}
interface StoredUrl {
createdAt: string;
uploadId: string;
fileKey: string;
}
interface UploadedPart {
PartNumber: number;
ETag: string;
Size: string;
LastModified: string;
}
export class S3MultipartUpload implements UploadStrategy {
private abortController: AbortController;
private chunks: ChunkState[] = [];
private uploadId?: string;
private fileKey?: string;
private readonly chunkAxios: AxiosInstance;
private abortedByUser = false;
private uploadedParts?: UploadedPart[];
get storageKey(): string {
return `s3-multipart::${this.file.fingerprint}`;
}
constructor(
private file: UploadedFile,
private config: UploadStrategyConfig
) {
this.abortController = new AbortController();
this.chunkAxios = axios.create();
axiosRetry(this.chunkAxios, {retries: 3});
}
async start() {
const storedUrl = getFromLocalStorage(this.storageKey);
if (storedUrl) {
await this.getUploadedParts(storedUrl);
}
if (!this.uploadedParts?.length) {
await this.createMultipartUpload();
if (!this.uploadId) return;
}
this.prepareChunks();
const result = await this.uploadParts();
if (result === 'done') {
const isCompleted = await this.completeMultipartUpload();
if (!isCompleted) return;
// catch errors so the "onError" handler below still runs
try {
const response = await this.createFileEntry();
if (response?.fileEntry) {
this.config.onSuccess?.(response?.fileEntry, this.file);
removeFromLocalStorage(this.storageKey);
return;
}
} catch {}
}
// upload failed
if (!this.abortController.signal.aborted) {
this.abortController.abort();
}
if (!this.abortedByUser) {
this.config.onError?.(null, this.file);
}
}
async abort() {
this.abortedByUser = true;
this.abortController.abort();
await this.abortUploadOnS3();
}
private async uploadParts(): Promise<'done' | undefined> {
const pendingChunks = this.chunks.filter(c => !c.done);
if (!pendingChunks.length) {
return Promise.resolve('done');
}
const signedUrls = await this.batchSignUrls(
pendingChunks.slice(0, batchSize)
);
if (!signedUrls) return;
while (signedUrls.length) {
const batch = signedUrls.splice(0, concurrency);
const pendingUploads = batch.map(item => {
return this.uploadPartToS3(item);
});
const result = await Promise.all(pendingUploads);
// if not all uploads in batch completed, bail
if (!result.every(r => r)) return;
}
return await this.uploadParts();
}
private async batchSignUrls(
batch: ChunkState[]
): Promise<SignedUrl[] | undefined> {
const response = await this.chunkAxios
.post(
'api/v1/s3/multipart/batch-sign-part-urls',
{
partNumbers: batch.map(i => i.partNumber),
uploadId: this.uploadId,
key: this.fileKey,
},
{signal: this.abortController.signal}
)
.then(r => r.data as {urls: SignedUrl[]})
.catch(() => {
  if (!this.abortController.signal.aborted) {
    this.abortController.abort();
  }
  return undefined;
});
return response?.urls;
}
private async uploadPartToS3({
url,
partNumber,
}: SignedUrl): Promise<boolean | void> {
const chunk = this.chunks.find(c => c.partNumber === partNumber);
if (!chunk) return;
return this.chunkAxios
.put(url, chunk.blob, {
withCredentials: false,
signal: this.abortController.signal,
onUploadProgress: (e: AxiosProgressEvent) => {
if (!e.event.lengthComputable) return;
chunk.bytesUploaded = e.loaded;
const totalUploaded = this.chunks.reduce(
(n, c) => n + c.bytesUploaded,
0
);
this.config.onProgress?.({
bytesUploaded: totalUploaded,
bytesTotal: this.file.size,
});
},
})
.then(r => {
const etag = r.headers.etag;
if (etag) {
chunk.done = true;
chunk.etag = etag;
return true;
}
})
.catch(() => {
  if (!this.abortController.signal.aborted) {
    this.abortController.abort();
  }
});
}
private async createMultipartUpload(): Promise<void> {
const response = await apiClient
.post('s3/multipart/create', {
filename: this.file.name,
mime: this.file.mime,
size: this.file.size,
extension: this.file.extension,
...this.config.metadata,
})
.then(r => r.data as {uploadId: string; key: string})
.catch(err => {
if (err.code !== 'ERR_CANCELED') {
this.config.onError?.(getAxiosErrorMessage(err), this.file);
}
});
if (response) {
this.uploadId = response.uploadId;
this.fileKey = response.key;
setInLocalStorage(this.storageKey, {
createdAt: new Date().toISOString(),
fileKey: this.fileKey,
uploadId: this.uploadId,
} as StoredUrl);
}
}
private async getUploadedParts({fileKey, uploadId}: StoredUrl) {
const response = await apiClient
.post('s3/multipart/get-uploaded-parts', {
key: fileKey,
uploadId,
})
.then(r => r.data as {parts: UploadedPart[]})
.catch(() => {
removeFromLocalStorage(this.storageKey);
return null;
});
if (response?.parts?.length) {
this.uploadedParts = response.parts;
this.uploadId = uploadId;
this.fileKey = fileKey;
}
}
private async completeMultipartUpload(): Promise<{location: string} | null> {
return apiClient
.post('s3/multipart/complete', {
key: this.fileKey,
uploadId: this.uploadId,
parts: this.chunks.map(c => {
return {
ETag: c.etag,
PartNumber: c.partNumber,
};
}),
})
.then(r => r.data)
.catch(() => {
  this.config.onError?.(null, this.file);
  this.abortUploadOnS3();
  return null;
})
.finally(() => {
removeFromLocalStorage(this.storageKey);
});
}
private async createFileEntry(): Promise<{fileEntry: FileEntry}> {
return await apiClient
.post('s3/entries', {
...this.config.metadata,
clientMime: this.file.mime,
clientName: this.file.name,
filename: this.fileKey!.split('/').pop(),
size: this.file.size,
clientExtension: this.file.extension,
})
.then(r => r.data);
}
private prepareChunks() {
this.chunks = [];
// at least 5MB per request, at most 10k requests
const minChunkSize = Math.max(5 * oneMB, Math.ceil(this.file.size / 10000));
const chunkSize = Math.max(desiredChunkSize, minChunkSize);
// Upload zero-sized files in one zero-sized chunk
if (this.file.size === 0) {
this.chunks.push({
blob: this.file.native,
done: false,
partNumber: 1,
bytesUploaded: 0,
});
} else {
let partNumber = 1;
for (let i = 0; i < this.file.size; i += chunkSize) {
const end = Math.min(this.file.size, i + chunkSize);
// check if this part was already uploaded previously
const previouslyUploaded = this.uploadedParts?.find(
p => p.PartNumber === partNumber
);
this.chunks.push({
blob: this.file.native.slice(i, end),
done: !!previouslyUploaded,
partNumber,
etag: previouslyUploaded ? previouslyUploaded.ETag : undefined,
bytesUploaded: previouslyUploaded?.Size
? parseInt(previouslyUploaded?.Size)
: 0,
});
partNumber++;
}
}
}
private abortUploadOnS3() {
return apiClient.post('s3/multipart/abort', {
key: this.fileKey,
uploadId: this.uploadId,
});
}
static async create(
file: UploadedFile,
config: UploadStrategyConfig
): Promise<S3MultipartUpload> {
return new S3MultipartUpload(file, config);
}
}
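
Worked example (illustrative) of the prepareChunks() math above: at least 5MB per part and at most 10,000 parts, matching S3's multipart limits.

const partsFor = (fileSize: number) => {
  const minChunkSize = Math.max(5 * oneMB, Math.ceil(fileSize / 10000));
  const chunkSize = Math.max(desiredChunkSize, minChunkSize);
  return Math.ceil(fileSize / chunkSize);
};
partsFor(1024 * oneMB);       // 1GB   -> 52 parts of 20MB each
partsFor(300 * 1024 * oneMB); // 300GB -> ~10000 parts of ~30.7MB each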

strategy/s3-upload.ts

@@ -0,0 +1,120 @@
import {UploadStrategy, UploadStrategyConfig} from './upload-strategy';
import {UploadedFile} from '../../uploaded-file';
import axios, {AxiosProgressEvent} from 'axios';
import {FileEntry} from '../../file-entry';
import {getAxiosErrorMessage} from '@common/utils/http/get-axios-error-message';
import {apiClient} from '@common/http/query-client';
interface PresignedRequest {
url: string;
key: string;
acl: string;
}
export class S3Upload implements UploadStrategy {
private abortController: AbortController;
private presignedRequest?: PresignedRequest;
constructor(
private file: UploadedFile,
private config: UploadStrategyConfig
) {
this.abortController = new AbortController();
}
async start() {
this.presignedRequest = await this.presignPostUrl();
if (!this.presignedRequest) return;
const result = await this.uploadFileToS3();
if (result !== 'uploaded') return;
const response = await this.createFileEntry();
if (response?.fileEntry) {
this.config.onSuccess?.(response.fileEntry, this.file);
} else if (!this.abortController.signal.aborted) {
this.config.onError?.(null, this.file);
}
}
abort() {
this.abortController.abort();
return Promise.resolve();
}
private presignPostUrl(): Promise<PresignedRequest> {
return apiClient
.post(
's3/simple/presign',
{
filename: this.file.name,
mime: this.file.mime,
disk: this.config.metadata?.disk,
size: this.file.size,
extension: this.file.extension,
...this.config.metadata,
},
{signal: this.abortController.signal}
)
.then(r => r.data)
.catch(err => {
if (err.code !== 'ERR_CANCELED') {
this.config.onError?.(getAxiosErrorMessage(err), this.file);
}
});
}
private uploadFileToS3() {
const {url, acl} = this.presignedRequest!;
return axios
.put(url, this.file.native, {
signal: this.abortController.signal,
withCredentials: false,
headers: {
'Content-Type': this.file.mime,
'x-amz-acl': acl,
},
onUploadProgress: (e: AxiosProgressEvent) => {
if (e.event.lengthComputable) {
this.config.onProgress?.({
bytesUploaded: e.loaded,
bytesTotal: e.total || 0,
});
}
},
})
.then(() => 'uploaded')
.catch(err => {
if (err.code !== 'ERR_CANCELED') {
this.config.onError?.(getAxiosErrorMessage(err), this.file);
}
});
}
private async createFileEntry() {
return await apiClient
.post('s3/entries', {
...this.config.metadata,
clientMime: this.file.mime,
clientName: this.file.name,
filename: this.presignedRequest!.key.split('/').pop(),
size: this.file.size,
clientExtension: this.file.extension,
})
.then(r => {
return r.data as {fileEntry: FileEntry};
})
.catch(err => {
if (err.code !== 'ERR_CANCELED') {
this.config.onError?.(getAxiosErrorMessage(err), this.file);
}
});
}
static async create(
file: UploadedFile,
config: UploadStrategyConfig
): Promise<S3Upload> {
return new S3Upload(file, config);
}
}
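
For reference (illustrative summary): the simple S3 strategy is three sequential requests, with abort wired through the shared AbortController.

// 1. POST s3/simple/presign  -> {url, key, acl} for a direct PUT
// 2. PUT <presigned url>     -> raw file body; progress events drive onProgress
// 3. POST s3/entries         -> {fileEntry} handed to onSuccess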

strategy/tus-upload.ts

@@ -0,0 +1,90 @@
import {Upload} from 'tus-js-client';
import {UploadedFile} from '../../uploaded-file';
import {UploadStrategy, UploadStrategyConfig} from './upload-strategy';
import {FileEntry} from '../../file-entry';
import {getAxiosErrorMessage} from '@common/utils/http/get-axios-error-message';
import {apiClient} from '@common/http/query-client';
import {getCookie} from 'react-use-cookie';
export class TusUpload implements UploadStrategy {
constructor(private upload: Upload) {}
start() {
this.upload.start();
}
abort() {
return this.upload.abort(true);
}
static async create(
file: UploadedFile,
{
onProgress,
onSuccess,
onError,
metadata,
chunkSize,
baseUrl,
}: UploadStrategyConfig
): Promise<TusUpload> {
const tusFingerprint = ['tus', file.fingerprint, 'drive'].join('-');
const upload = new Upload(file.native, {
fingerprint: () => Promise.resolve(tusFingerprint),
removeFingerprintOnSuccess: true,
endpoint: `${baseUrl}/api/v1/tus/upload`,
chunkSize,
retryDelays: [0, 3000, 5000, 10000, 20000],
overridePatchMethod: true,
metadata: {
name: window.btoa(file.id),
clientName: file.name,
clientExtension: file.extension,
clientMime: file.mime || '',
clientSize: `${file.size}`,
...(metadata as Record<string, string>),
},
headers: {
'X-XSRF-TOKEN': getCookie('XSRF-TOKEN'),
},
onError: err => {
if ('originalResponse' in err && err.originalResponse) {
try {
const message = JSON.parse(err.originalResponse.getBody())?.message;
onError?.(message, file);
} catch (e) {
onError?.(null, file);
}
} else {
onError?.(null, file);
}
},
onProgress(bytesUploaded, bytesTotal) {
onProgress?.({bytesUploaded, bytesTotal});
},
onSuccess: async () => {
const uploadKey = upload.url?.split('/').pop();
try {
if (uploadKey) {
const response = await createFileEntry(uploadKey);
onSuccess?.(response.fileEntry, file);
}
} catch (err) {
localStorage.removeItem(tusFingerprint);
onError?.(getAxiosErrorMessage(err), file);
}
},
});
const previousUploads = await upload.findPreviousUploads();
if (previousUploads.length) {
upload.resumeFromPreviousUpload(previousUploads[0]);
}
return new TusUpload(upload);
}
}
function createFileEntry(uploadKey: string): Promise<{fileEntry: FileEntry}> {
return apiClient.post('tus/entries', {uploadKey}).then(r => r.data);
}
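
Usage sketch (illustrative, hypothetical baseUrl): because the fingerprint is stable per file, re-creating the strategy after a page reload resumes from the last acknowledged chunk instead of starting over.

const tus = await TusUpload.create(uploadedFile, {
  baseUrl: 'https://app.example.com', // hypothetical
  chunkSize: 5 * 1024 * 1024,
  onSuccess: entry => console.log('finished (possibly resumed)', entry.id),
});
tus.start();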

strategy/upload-strategy.ts

@@ -0,0 +1,20 @@
import {BackendMetadata} from '../../types/backend-metadata';
import {Restrictions} from '../validate-upload';
import {FileEntry} from '../../file-entry';
import {UploadedFile} from '@common/uploads/uploaded-file';
export interface UploadStrategyConfig {
chunkSize?: number;
baseUrl?: string;
restrictions?: Restrictions;
showToastOnRestrictionFail?: boolean;
onProgress?: (progress: {bytesUploaded: number; bytesTotal: number}) => void;
onSuccess?: (entry: FileEntry, file: UploadedFile) => void;
onError?: (message: string | undefined | null, file: UploadedFile) => void;
metadata?: BackendMetadata;
}
export interface UploadStrategy {
start: () => void;
abort: () => Promise<void>;
}

use-active-upload.ts

@@ -0,0 +1,93 @@
import {useCallback, useRef} from 'react';
import {useFileUploadStore} from './file-upload-provider';
import {UploadedFile} from '../uploaded-file';
import {UploadStrategyConfig} from './strategy/upload-strategy';
import {openUploadWindow} from '../utils/open-upload-window';
import {useDeleteFileEntries} from '@common/uploads/requests/delete-file-entries';
interface DeleteEntryProps {
onSuccess: () => void;
entryPath?: string;
}
export function useActiveUpload() {
const deleteFileEntries = useDeleteFileEntries();
// keep the upload id in a ref to avoid an extra render; the zustand
// selector below will pick up the new id on the first progress event
const uploadIdRef = useRef<string>();
const uploadSingle = useFileUploadStore(s => s.uploadSingle);
const _abortUpload = useFileUploadStore(s => s.abortUpload);
const updateFileUpload = useFileUploadStore(s => s.updateFileUpload);
const activeUpload = useFileUploadStore(s =>
uploadIdRef.current ? s.fileUploads.get(uploadIdRef.current) : null,
);
const uploadFile = useCallback(
(file: File | UploadedFile, config?: UploadStrategyConfig) => {
uploadIdRef.current = uploadSingle(file, config);
},
[uploadSingle],
);
const selectAndUploadFile = useCallback(
async (config?: UploadStrategyConfig) => {
const files = await openUploadWindow({
types: config?.restrictions?.allowedFileTypes,
});
uploadFile(files[0], config);
return files[0];
},
[uploadFile],
);
const deleteEntry = useCallback(
({onSuccess, entryPath}: DeleteEntryProps) => {
const handleSuccess = () => {
if (activeUpload) {
updateFileUpload(activeUpload.file.id, {
...activeUpload,
entry: undefined,
});
}
onSuccess();
};
if (!entryPath && !activeUpload?.entry?.id) {
handleSuccess();
return;
}
deleteFileEntries.mutate(
{
paths: entryPath ? [entryPath] : undefined,
entryIds: activeUpload?.entry?.id
? [activeUpload?.entry?.id]
: undefined,
deleteForever: true,
},
{onSuccess: handleSuccess},
);
},
[deleteFileEntries, activeUpload, updateFileUpload],
);
const abortUpload = useCallback(() => {
if (activeUpload) {
_abortUpload(activeUpload.file.id);
}
}, [activeUpload, _abortUpload]);
return {
uploadFile,
selectAndUploadFile,
percentage: activeUpload?.percentage || 0,
uploadStatus: activeUpload?.status,
entry: activeUpload?.entry,
deleteEntry,
isDeletingEntry: deleteFileEntries.isPending,
activeUpload,
abortUpload,
};
}
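
Usage sketch (illustrative): a component rendered under FileUploadProvider that uploads a single image and reflects progress.

function AvatarUploader() {
  const {selectAndUploadFile, percentage, uploadStatus} = useActiveUpload();
  return (
    <button
      onClick={() =>
        selectAndUploadFile({
          restrictions: {allowedFileTypes: ['image/*'], maxFileSize: 5 * 1024 * 1024},
          showToastOnRestrictionFail: true,
        })
      }
    >
      {uploadStatus === 'inProgress' ? `${Math.round(percentage)}%` : 'Upload avatar'}
    </button>
  );
}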

validate-upload.ts

@@ -0,0 +1,61 @@
import {UploadedFile} from '../uploaded-file';
import {message} from '../../i18n/message';
import {prettyBytes} from '../utils/pretty-bytes';
import {MessageDescriptor} from '../../i18n/message-descriptor';
import match from 'mime-match';
export interface Restrictions {
maxFileSize?: number;
allowedFileTypes?: string[];
blockedFileTypes?: string[];
}
export function validateUpload(
file: UploadedFile,
restrictions?: Restrictions
): MessageDescriptor | void {
if (!restrictions) return;
const {maxFileSize, allowedFileTypes, blockedFileTypes} = restrictions;
if (maxFileSize && file.size != null && file.size > maxFileSize) {
return message('`:file` exceeds maximum allowed size of :size', {
values: {file: file.name, size: prettyBytes(maxFileSize)},
});
}
if (allowedFileTypes?.length) {
if (!fileMatchesTypes(file, allowedFileTypes)) {
return message('This file type is not allowed');
}
}
if (blockedFileTypes?.length) {
if (fileMatchesTypes(file, blockedFileTypes)) {
return message('This file type is not allowed');
}
}
}
function fileMatchesTypes(file: UploadedFile, types: string[]): boolean {
return (
types
  // support multiple file types in one string (video/mp4,audio/mp3,image/png)
  .flatMap(type => type.split(','))
.some(type => {
// check if this is a mime-type
if (type.includes('/')) {
if (!file.mime) return false;
return match(file.mime.replace(/;.*?$/, ''), type);
}
// otherwise this is likely an extension
const extension = type.replace('.', '').toLowerCase();
if (extension && file.extension) {
return file.extension.toLowerCase() === extension;
}
return false;
})
);
}
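
Usage sketch (illustrative, hypothetical UploadedFile instances): a void return means the file passed; a MessageDescriptor is the translatable error.

const restrictions: Restrictions = {
  maxFileSize: 10 * 1024 * 1024,
  allowedFileTypes: ['image/*', '.pdf'],
};
validateUpload(photo, restrictions);  // undefined -> passes (e.g. image/jpeg)
validateUpload(script, restrictions); // 'This file type is not allowed' descriptor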