first commit
Some checks failed
Build / run (push) Has been cancelled

This commit is contained in:
maher
2025-10-29 11:42:25 +01:00
commit 703f50a09d
4595 changed files with 385164 additions and 0 deletions

View File

@@ -0,0 +1,71 @@
import {UploadedFile} from '../../uploaded-file';
import {UploadStrategy, UploadStrategyConfig} from './upload-strategy';
import {apiClient} from '@common/http/query-client';
import {getAxiosErrorMessage} from '@common/utils/http/get-axios-error-message';
import {AxiosProgressEvent} from 'axios';
/**
 * Upload strategy that sends the whole file to the backend in a single
 * multipart/form-data POST request. Progress is reported through
 * `config.onProgress`; the upload terminates with exactly one of
 * `config.onSuccess` / `config.onError` unless it was aborted.
 */
export class AxiosUpload implements UploadStrategy {
  private abortController: AbortController;

  constructor(
    private file: UploadedFile,
    private config: UploadStrategyConfig,
  ) {
    this.abortController = new AbortController();
  }

  async start() {
    const formData = new FormData();
    const {onSuccess, onError, onProgress, metadata} = this.config;
    formData.set('file', this.file.native);
    // NOTE(review): hard-coded workspace id looks like leftover debug code.
    // It can still be overridden by `metadata` below (FormData.set replaces
    // the value) — confirm intent and source this from config if possible.
    formData.set('workspaceId', `12`);
    if (metadata) {
      Object.entries(metadata).forEach(([key, value]) => {
        formData.set(key, `${value}`);
      });
    }
    const response = await apiClient
      .post('file-entries', formData, {
        onUploadProgress: (e: AxiosProgressEvent) => {
          if (e.event.lengthComputable) {
            onProgress?.({
              bytesUploaded: e.loaded,
              bytesTotal: e.total || 0,
            });
          }
        },
        signal: this.abortController.signal,
        headers: {
          'Content-Type': 'multipart/form-data',
        },
      })
      .catch(err => {
        // A user-initiated abort is not an error; anything else is reported.
        if (err.code !== 'ERR_CANCELED') {
          onError?.(getAxiosErrorMessage(err), this.file);
        }
      });
    // if upload was aborted, it will be handled and set
    // as "aborted" already, no need to set it as "failed"
    if (this.abortController.signal.aborted) {
      return;
    }
    if (response && response.data.fileEntry) {
      onSuccess?.(response.data.fileEntry, this.file);
    } else if (response) {
      // Fix: the request succeeded but the payload carried no file entry.
      // Previously this case completed silently without invoking any
      // callback, leaving the upload stuck in a pending state.
      onError?.(null, this.file);
    }
  }

  /** Cancels the in-flight request; the catch above swallows ERR_CANCELED. */
  abort() {
    this.abortController.abort();
    return Promise.resolve();
  }

  static async create(
    file: UploadedFile,
    config: UploadStrategyConfig,
  ): Promise<AxiosUpload> {
    return new AxiosUpload(file, config);
  }
}

View File

@@ -0,0 +1,331 @@
import {UploadStrategy, UploadStrategyConfig} from './upload-strategy';
import {UploadedFile} from '../../uploaded-file';
import axios, {AxiosInstance, AxiosProgressEvent} from 'axios';
import {FileEntry} from '../../file-entry';
import {
getFromLocalStorage,
removeFromLocalStorage,
setInLocalStorage,
} from '@common/utils/hooks/local-storage';
import {apiClient} from '@common/http/query-client';
import {getAxiosErrorMessage} from '@common/utils/http/get-axios-error-message';
import axiosRetry from 'axios-retry';
const oneMB = 1024 * 1024;
// chunk size that will be uploaded to s3 per request
const desiredChunkSize = 20 * oneMB;
// how many urls should be pre-signed per call to backend
const batchSize = 10;
// number of concurrent requests to s3 api
const concurrency = 5;
interface ChunkState {
blob: Blob | File;
done: boolean;
etag?: string;
partNumber: number;
bytesUploaded: number;
}
interface SignedUrl {
url: string;
partNumber: number;
}
interface StoredUrl {
createdAt: string;
uploadId: string;
fileKey: string;
}
interface UploadedPart {
PartNumber: number;
ETag: string;
Size: string;
LastModified: string;
}
/**
 * Upload strategy for large files: splits the file into parts, asks the
 * backend to presign a PUT url per part, and uploads parts to S3 directly
 * (with limited concurrency and per-request retries). Upload id/key are
 * persisted in localStorage so an interrupted upload of the same file
 * (matched by fingerprint) can be resumed, skipping already-uploaded parts.
 */
export class S3MultipartUpload implements UploadStrategy {
  private abortController: AbortController;
  // One entry per part, in part-number order; `done` marks parts already on
  // S3 (uploaded in this session or recovered from a previous one).
  private chunks: ChunkState[] = [];
  private uploadId?: string;
  private fileKey?: string;
  // Dedicated axios instance for S3 part traffic so axios-retry can be
  // attached without affecting the shared apiClient.
  private readonly chunkAxios: AxiosInstance;
  // Distinguishes a user-initiated abort() from an internal abort used to
  // cancel remaining work after a failure.
  private abortedByUser = false;
  // Parts reported by the backend when resuming a previous upload.
  private uploadedParts?: UploadedPart[];

  // localStorage key under which uploadId/fileKey are persisted for resume.
  get storageKey(): string {
    return `s3-multipart::${this.file.fingerprint}`;
  }

  constructor(
    private file: UploadedFile,
    private config: UploadStrategyConfig
  ) {
    this.abortController = new AbortController();
    this.chunkAxios = axios.create();
    axiosRetry(this.chunkAxios, {retries: 3});
  }

  /**
   * Orchestrates the whole upload: resume (if possible) or create a new
   * multipart upload, push all pending parts, complete the upload on S3,
   * then register the file entry with the backend. Any failure path falls
   * through to the onError call at the bottom (unless the user aborted).
   */
  async start() {
    const storedUrl = getFromLocalStorage(this.storageKey);
    if (storedUrl) {
      // Try to resume: fills uploadedParts/uploadId/fileKey on success.
      await this.getUploadedParts(storedUrl);
    }
    if (!this.uploadedParts?.length) {
      await this.createMultipartUpload();
      // createMultipartUpload already reported the error via onError.
      if (!this.uploadId) return;
    }
    this.prepareChunks();
    const result = await this.uploadParts();
    if (result === 'done') {
      const isCompleted = await this.completeMultipartUpload();
      if (!isCompleted) return;
      // catch any errors so below "onError" handler gets executed
      try {
        const response = await this.createFileEntry();
        if (response?.fileEntry) {
          this.config.onSuccess?.(response?.fileEntry, this.file);
          removeFromLocalStorage(this.storageKey);
          return;
        }
      } catch {}
    }
    // upload failed
    if (!this.abortController.signal.aborted) {
      this.abortController.abort();
    }
    if (!this.abortedByUser) {
      this.config.onError?.(null, this.file);
    }
  }

  /** User-initiated cancel: stops in-flight requests and aborts on S3. */
  async abort() {
    this.abortedByUser = true;
    this.abortController.abort();
    await this.abortUploadOnS3();
  }

  /**
   * Recursively uploads all parts not yet marked done: signs up to
   * `batchSize` part urls per backend call, then PUTs them to S3 in waves
   * of `concurrency`. Resolves 'done' when nothing is pending; resolves
   * undefined if any sign/upload step failed (the failure already aborted
   * the controller).
   */
  private async uploadParts(): Promise<any> {
    const pendingChunks = this.chunks.filter(c => !c.done);
    if (!pendingChunks.length) {
      return Promise.resolve('done');
    }
    const signedUrls = await this.batchSignUrls(
      pendingChunks.slice(0, batchSize)
    );
    if (!signedUrls) return;
    while (signedUrls.length) {
      const batch = signedUrls.splice(0, concurrency);
      const pendingUploads = batch.map(item => {
        return this.uploadPartToS3(item);
      });
      const result = await Promise.all(pendingUploads);
      // if not all uploads in batch completed, bail
      if (!result.every(r => r)) return;
    }
    return await this.uploadParts();
  }

  /**
   * Asks the backend to presign PUT urls for the given parts. On failure,
   * aborts the controller (cancelling sibling requests) and resolves
   * undefined.
   */
  private async batchSignUrls(
    batch: ChunkState[]
  ): Promise<SignedUrl[] | undefined> {
    const response = await this.chunkAxios
      .post(
        'api/v1/s3/multipart/batch-sign-part-urls',
        {
          partNumbers: batch.map(i => i.partNumber),
          uploadId: this.uploadId,
          key: this.fileKey,
        },
        {signal: this.abortController.signal}
      )
      .then(r => r.data as {urls: SignedUrl[]})
      .catch(err => {
        if (!this.abortController.signal.aborted) {
          this.abortController.abort();
        }
      });
    return response?.urls;
  }

  /**
   * PUTs one part's blob to its presigned url (no app credentials sent to
   * S3). Records the returned ETag — required later by the complete call.
   * Resolves true on success; undefined on failure (after aborting the
   * controller so the remaining batch is cancelled).
   * NOTE(review): reading `r.headers.etag` requires the bucket's CORS
   * config to expose the ETag header — confirm on the S3 side.
   */
  private async uploadPartToS3({
    url,
    partNumber,
  }: SignedUrl): Promise<boolean | void> {
    const chunk = this.chunks.find(c => c.partNumber === partNumber);
    if (!chunk) return;
    return this.chunkAxios
      .put(url, chunk.blob, {
        withCredentials: false,
        signal: this.abortController.signal,
        onUploadProgress: (e: AxiosProgressEvent) => {
          if (!e.event.lengthComputable) return;
          chunk.bytesUploaded = e.loaded;
          // Progress is reported over the whole file, not just this part.
          const totalUploaded = this.chunks.reduce(
            (n, c) => n + c.bytesUploaded,
            0
          );
          this.config.onProgress?.({
            bytesUploaded: totalUploaded,
            bytesTotal: this.file.size,
          });
        },
      })
      .then(r => {
        const etag = r.headers.etag;
        if (etag) {
          chunk.done = true;
          chunk.etag = etag;
          return true;
        }
      })
      .catch(err => {
        if (!this.abortController.signal.aborted && err !== undefined) {
          this.abortController.abort();
        }
      });
  }

  /**
   * Creates a new multipart upload on the backend and persists its
   * id/key in localStorage so a later session can resume it.
   */
  private async createMultipartUpload(): Promise<void> {
    const response = await apiClient
      .post('s3/multipart/create', {
        filename: this.file.name,
        mime: this.file.mime,
        size: this.file.size,
        extension: this.file.extension,
        ...this.config.metadata,
      })
      .then(r => r.data as {uploadId: string; key: string})
      .catch(err => {
        if (err.code !== 'ERR_CANCELED') {
          this.config.onError?.(getAxiosErrorMessage(err), this.file);
        }
      });
    if (response) {
      this.uploadId = response.uploadId;
      this.fileKey = response.key;
      setInLocalStorage(this.storageKey, {
        createdAt: new Date().toISOString(),
        fileKey: this.fileKey,
        uploadId: this.uploadId,
      } as StoredUrl);
    }
  }

  /**
   * Queries the backend for parts already uploaded under a stored upload
   * id. On any failure the stale localStorage entry is dropped so the next
   * attempt starts a fresh upload.
   */
  private async getUploadedParts({fileKey, uploadId}: StoredUrl) {
    const response = await apiClient
      .post('s3/multipart/get-uploaded-parts', {
        key: fileKey,
        uploadId,
      })
      .then(r => r.data as {parts: UploadedPart[]})
      .catch(() => {
        removeFromLocalStorage(this.storageKey);
        return null;
      });
    if (response?.parts?.length) {
      this.uploadedParts = response.parts;
      this.uploadId = uploadId;
      this.fileKey = fileKey;
    }
  }

  /**
   * Finalizes the multipart upload on S3 via the backend, sending every
   * part's number + ETag. Resolves null/undefined on failure (after
   * reporting the error and aborting the upload on S3). The stored resume
   * state is cleared either way — completion can't be retried.
   */
  private async completeMultipartUpload(): Promise<{location: string} | null> {
    return apiClient
      .post('s3/multipart/complete', {
        key: this.fileKey,
        uploadId: this.uploadId,
        parts: this.chunks.map(c => {
          return {
            ETag: c.etag,
            PartNumber: c.partNumber,
          };
        }),
      })
      .then(r => r.data)
      .catch(() => {
        this.config.onError?.(null, this.file);
        this.abortUploadOnS3();
      })
      .finally(() => {
        removeFromLocalStorage(this.storageKey);
      });
  }

  /**
   * Registers the completed S3 object with the backend so a FileEntry row
   * is created. Errors propagate to the try/catch in start().
   */
  private async createFileEntry(): Promise<{fileEntry: FileEntry}> {
    return await apiClient
      .post('s3/entries', {
        ...this.config.metadata,
        clientMime: this.file.mime,
        clientName: this.file.name,
        filename: this.fileKey!.split('/').pop(),
        size: this.file.size,
        clientExtension: this.file.extension,
      })
      .then(r => r.data)
      .catch();
  }

  /**
   * Slices the file into part-sized blobs. Parts found in `uploadedParts`
   * (resumed upload) are pre-marked done with their ETag and byte count so
   * they are skipped and still counted in progress totals.
   */
  private prepareChunks() {
    this.chunks = [];
    // at least 5MB per request, at most 10k requests
    const minChunkSize = Math.max(5 * oneMB, Math.ceil(this.file.size / 10000));
    const chunkSize = Math.max(desiredChunkSize, minChunkSize);
    // Upload zero-sized files in one zero-sized chunk
    if (this.file.size === 0) {
      this.chunks.push({
        blob: this.file.native,
        done: false,
        partNumber: 1,
        bytesUploaded: 0,
      });
    } else {
      let partNumber = 1;
      for (let i = 0; i < this.file.size; i += chunkSize) {
        const end = Math.min(this.file.size, i + chunkSize);
        // check if this part was already uploaded previously
        const previouslyUploaded = this.uploadedParts?.find(
          p => p.PartNumber === partNumber
        );
        this.chunks.push({
          blob: this.file.native.slice(i, end),
          done: !!previouslyUploaded,
          partNumber,
          etag: previouslyUploaded ? previouslyUploaded.ETag : undefined,
          bytesUploaded: previouslyUploaded?.Size
            ? parseInt(previouslyUploaded?.Size)
            : 0,
        });
        partNumber++;
      }
    }
  }

  /** Tells the backend to abort the multipart upload on S3 (frees parts). */
  private abortUploadOnS3() {
    return apiClient.post('s3/multipart/abort', {
      key: this.fileKey,
      uploadId: this.uploadId,
    });
  }

  static async create(
    file: UploadedFile,
    config: UploadStrategyConfig
  ): Promise<S3MultipartUpload> {
    return new S3MultipartUpload(file, config);
  }
}

View File

@@ -0,0 +1,120 @@
import {UploadStrategy, UploadStrategyConfig} from './upload-strategy';
import {UploadedFile} from '../../uploaded-file';
import axios, {AxiosProgressEvent} from 'axios';
import {FileEntry} from '../../file-entry';
import {getAxiosErrorMessage} from '@common/utils/http/get-axios-error-message';
import {apiClient} from '@common/http/query-client';
interface PresignedRequest {
url: string;
key: string;
acl: string;
}
/**
 * Upload strategy for direct-to-S3 uploads in a single request: presigns a
 * PUT url via the backend, PUTs the file straight to S3, then registers the
 * uploaded object with the backend as a FileEntry.
 */
export class S3Upload implements UploadStrategy {
  private abortController: AbortController;
  private presignedRequest?: PresignedRequest;

  constructor(
    private file: UploadedFile,
    private config: UploadStrategyConfig
  ) {
    this.abortController = new AbortController();
  }

  /**
   * Runs the three steps in order, short-circuiting on failure. Each step
   * reports its own request errors via onError; the branch at the bottom
   * covers the remaining "entry request succeeded but returned no file
   * entry" case.
   */
  async start() {
    this.presignedRequest = await this.presignPostUrl();
    if (!this.presignedRequest) return;
    const result = await this.uploadFileToS3();
    if (result !== 'uploaded') return;
    const response = await this.createFileEntry();
    if (response?.fileEntry) {
      this.config.onSuccess?.(response.fileEntry, this.file);
    } else if (response && !this.abortController.signal.aborted) {
      // Fix: the original condition was `!this.abortController.signal`,
      // which is always false (an AbortSignal object is always truthy), so
      // this branch was unreachable. Checking `response` as well avoids a
      // duplicate onError: request failures were already reported by
      // createFileEntry's catch handler.
      this.config.onError?.(null, this.file);
    }
  }

  /** Cancels whichever request is currently in flight. */
  abort() {
    this.abortController.abort();
    return Promise.resolve();
  }

  // Asks the backend for a presigned S3 PUT url. Resolves undefined on
  // failure (the original signature claimed PresignedRequest even though
  // the catch handler resolves undefined).
  private presignPostUrl(): Promise<PresignedRequest | undefined> {
    return apiClient
      .post(
        's3/simple/presign',
        {
          filename: this.file.name,
          mime: this.file.mime,
          disk: this.config.metadata?.disk,
          size: this.file.size,
          extension: this.file.extension,
          ...this.config.metadata,
        },
        {signal: this.abortController.signal}
      )
      .then(r => r.data)
      .catch(err => {
        if (err.code !== 'ERR_CANCELED') {
          this.config.onError?.(getAxiosErrorMessage(err), this.file);
        }
      });
  }

  // PUTs the raw file to the presigned url with plain axios so no app
  // cookies/credentials are sent to S3. Resolves 'uploaded' on success.
  private uploadFileToS3() {
    const {url, acl} = this.presignedRequest!;
    return axios
      .put(url, this.file.native, {
        signal: this.abortController.signal,
        withCredentials: false,
        headers: {
          'Content-Type': this.file.mime,
          'x-amz-acl': acl,
        },
        onUploadProgress: (e: AxiosProgressEvent) => {
          if (e.event.lengthComputable) {
            this.config.onProgress?.({
              bytesUploaded: e.loaded,
              bytesTotal: e.total || 0,
            });
          }
        },
      })
      .then(() => 'uploaded')
      .catch(err => {
        if (err.code !== 'ERR_CANCELED') {
          this.config.onError?.(getAxiosErrorMessage(err), this.file);
        }
      });
  }

  // Registers the uploaded S3 object with the backend so a FileEntry row is
  // created for it. Resolves undefined on failure (error already reported).
  private async createFileEntry() {
    return await apiClient
      .post('s3/entries', {
        ...this.config.metadata,
        clientMime: this.file.mime,
        clientName: this.file.name,
        filename: this.presignedRequest!.key.split('/').pop(),
        size: this.file.size,
        clientExtension: this.file.extension,
      })
      .then(r => {
        return r.data as {fileEntry: FileEntry};
      })
      .catch(err => {
        if (err.code !== 'ERR_CANCELED') {
          this.config.onError?.(getAxiosErrorMessage(err), this.file);
        }
      });
  }

  static async create(
    file: UploadedFile,
    config: UploadStrategyConfig
  ): Promise<S3Upload> {
    return new S3Upload(file, config);
  }
}

View File

@@ -0,0 +1,90 @@
import {Upload} from 'tus-js-client';
import {UploadedFile} from '../../uploaded-file';
import {UploadStrategy, UploadStrategyConfig} from './upload-strategy';
import {FileEntry} from '../../file-entry';
import {getAxiosErrorMessage} from '@common/utils/http/get-axios-error-message';
import {apiClient} from '@common/http/query-client';
import {getCookie} from 'react-use-cookie';
/**
 * Upload strategy backed by tus-js-client: resumable, chunked uploads to
 * the backend's tus endpoint. Construction happens in the static create()
 * so a previous interrupted upload of the same file can be resumed.
 */
export class TusUpload implements UploadStrategy {
  constructor(private upload: Upload) {}

  start() {
    this.upload.start();
  }

  abort() {
    // `true` = also terminate the upload on the server, not just locally.
    return this.upload.abort(true);
  }

  /**
   * Builds the tus Upload with callbacks wired to the strategy config and
   * resumes from a previous upload (matched by fingerprint) when one exists.
   */
  static async create(
    file: UploadedFile,
    {
      onProgress,
      onSuccess,
      onError,
      metadata,
      chunkSize,
      baseUrl,
    }: UploadStrategyConfig
  ): Promise<TusUpload> {
    // Stable fingerprint so findPreviousUploads() can locate resumable state.
    const tusFingerprint = ['tus', file.fingerprint, 'drive'].join('-');
    const upload = new Upload(file.native, {
      fingerprint: () => Promise.resolve(tusFingerprint),
      removeFingerprintOnSuccess: true,
      endpoint: `${baseUrl}/api/v1/tus/upload`,
      chunkSize,
      retryDelays: [0, 3000, 5000, 10000, 20000],
      // Send PATCH requests as POST with an override header (proxy-friendly).
      overridePatchMethod: true,
      metadata: {
        // btoa requires Latin-1 input — assumes file.id is ASCII; confirm.
        name: window.btoa(file.id),
        clientName: file.name,
        clientExtension: file.extension,
        clientMime: file.mime || '',
        clientSize: `${file.size}`,
        ...(metadata as Record<string, string>),
      },
      headers: {
        // Laravel-style CSRF token; tus requests bypass the apiClient.
        'X-XSRF-TOKEN': getCookie('XSRF-TOKEN'),
      },
      onError: err => {
        // Prefer the backend's error message when a response body exists.
        if ('originalResponse' in err && err.originalResponse) {
          try {
            const message = JSON.parse(err.originalResponse.getBody())?.message;
            onError?.(message, file);
          } catch (e) {
            onError?.(null, file);
          }
        } else {
          onError?.(null, file);
        }
      },
      onProgress(bytesUploaded, bytesTotal) {
        onProgress?.({bytesUploaded, bytesTotal});
      },
      onSuccess: async () => {
        // The upload key is the last segment of the tus upload url.
        const uploadKey = upload.url?.split('/').pop();
        try {
          if (uploadKey) {
            const response = await createFileEntry(uploadKey);
            onSuccess?.(response.fileEntry, file);
          }
        } catch (err) {
          // Entry creation failed: drop resume state so a retry re-uploads.
          localStorage.removeItem(tusFingerprint);
          onError?.(getAxiosErrorMessage(err), file);
        }
      },
    });
    const previousUploads = await upload.findPreviousUploads();
    if (previousUploads.length) {
      upload.resumeFromPreviousUpload(previousUploads[0]);
    }
    return new TusUpload(upload);
  }
}
/** Converts a completed tus upload (by its key) into a backend FileEntry. */
async function createFileEntry(
  uploadKey: string,
): Promise<{fileEntry: FileEntry}> {
  const response = await apiClient.post('tus/entries', {uploadKey});
  return response.data;
}

View File

@@ -0,0 +1,20 @@
import {BackendMetadata} from '../../types/backend-metadata';
import {Restrictions} from '../validate-upload';
import {FileEntry} from '../../file-entry';
import {UploadedFile} from '@common/uploads/uploaded-file';
/**
 * Options shared by every upload strategy (axios, tus, S3 simple/multipart).
 * Strategies invoke the callbacks to report lifecycle events to the caller.
 */
export interface UploadStrategyConfig {
  // Chunk size in bytes; forwarded to the tus strategy's Upload options.
  chunkSize?: number;
  // Origin used to build absolute endpoints (e.g. the tus upload url).
  baseUrl?: string;
  // Validation rules for the file — presumably checked before upload
  // starts; usage is outside this file, verify against validate-upload.
  restrictions?: Restrictions;
  // Whether a toast is shown when restrictions fail — handled by the
  // caller, not by the strategies in this file.
  showToastOnRestrictionFail?: boolean;
  // Called repeatedly while bytes are transferred.
  onProgress?: (progress: {bytesUploaded: number; bytesTotal: number}) => void;
  // Called once with the created backend entry when the upload completes.
  onSuccess?: (entry: FileEntry, file: UploadedFile) => void;
  // Called on failure; message is null/undefined when no server message
  // is available (e.g. network error or missing response body).
  onError?: (message: string | undefined | null, file: UploadedFile) => void;
  // Extra key/value pairs strategies merge into their backend requests.
  metadata?: BackendMetadata;
}
/**
 * Minimal contract each upload strategy implements: begin the transfer and
 * cancel it. Outcomes are delivered via UploadStrategyConfig callbacks, not
 * return values.
 */
export interface UploadStrategy {
  // Kicks off the upload; implementations may be async (a Promise-returning
  // start is assignable to `() => void`).
  start: () => void;
  // Cancels the upload; resolves once cancellation has been issued.
  abort: () => Promise<void>;
}