chore: remove chunked uploads for now

This commit is contained in:
Hampus Kraft
2026-02-19 14:59:46 +00:00
parent bc2a78e5af
commit 1a1d13b571
59 changed files with 2 additions and 1456 deletions

View File

@@ -96,11 +96,6 @@ export const Endpoints = {
CHANNEL_TYPING: (channelId: string) => `/channels/${channelId}/typing`,
CHANNEL_WEBHOOKS: (channelId: string) => `/channels/${channelId}/webhooks`,
CHANNEL_RTC_REGIONS: (channelId: string) => `/channels/${channelId}/rtc-regions`,
CHANNEL_CHUNKED_UPLOADS: (channelId: string) => `/channels/${channelId}/chunked-uploads`,
CHANNEL_CHUNKED_UPLOAD_CHUNK: (channelId: string, uploadId: string, chunkIndex: number) =>
`/channels/${channelId}/chunked-uploads/${uploadId}/chunks/${chunkIndex}`,
CHANNEL_CHUNKED_UPLOAD_COMPLETE: (channelId: string, uploadId: string) =>
`/channels/${channelId}/chunked-uploads/${uploadId}/complete`,
CHANNEL_CALL: (channelId: string) => `/channels/${channelId}/call`,
CHANNEL_CALL_RING: (channelId: string) => `/channels/${channelId}/call/ring`,
CHANNEL_CALL_STOP_RINGING: (channelId: string) => `/channels/${channelId}/call/stop-ringing`,

View File

@@ -1,195 +0,0 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {Endpoints} from '@app/Endpoints';
import http from '@app/lib/HttpClient';
import {Logger} from '@app/lib/Logger';
import {CHUNKED_UPLOAD_CHUNK_SIZE} from '@fluxer/constants/src/LimitConstants';
const logger = new Logger('ChunkedUploadService');

// Upper bound on chunk PUT requests in flight at once for a single file.
const MAX_CONCURRENT_CHUNKS = 4;
// Retries per chunk (on top of the first attempt) for retryable failures
// (HTTP status >= 500 or 429).
const MAX_CHUNK_RETRIES = 3;
// Base for exponential backoff between chunk retries: 1s, 2s, 4s, ...
const RETRY_BASE_DELAY_MS = 1000;

// Final result returned to callers once the completion endpoint responds.
interface ChunkedUploadResult {
  // Server-assigned filename used to reference the upload in a message payload.
  upload_filename: string;
  file_size: number;
  content_type: string;
}

// Response from the initiate endpoint; the server decides chunk sizing.
interface InitiateUploadResponse {
  upload_id: string;
  upload_filename: string;
  // Bytes per chunk; the final chunk may be smaller.
  chunk_size: number;
  chunk_count: number;
}

// Per-chunk acknowledgement; the etag is echoed back in the completion call.
interface UploadChunkResponse {
  etag: string;
}

// Response from the completion endpoint.
interface CompleteUploadResponse {
  upload_filename: string;
  file_size: number;
  content_type: string;
}
/**
 * Uploads a file to a channel via the chunked upload API.
 *
 * Flow: initiate a session (the server picks chunk size and count), upload
 * chunks with up to MAX_CONCURRENT_CHUNKS in flight, retrying transient
 * failures with exponential backoff, then complete the session with the
 * collected etags.
 *
 * @param channelId - Target channel id.
 * @param file - The file to upload.
 * @param onProgress - Optional callback invoked with (bytesLoaded, totalBytes).
 *   Progress is reported at whole-chunk granularity.
 * @param signal - Optional abort signal; aborting surfaces an AbortError (or
 *   the in-flight request's own error).
 * @returns The server-assigned upload filename plus file size and content type.
 * @throws The last HTTP error if a chunk fails after all retries, or an
 *   AbortError when cancelled.
 */
export async function uploadFileChunked(
  channelId: string,
  file: File,
  onProgress?: (loaded: number, total: number) => void,
  signal?: AbortSignal,
): Promise<ChunkedUploadResult> {
  const initiateResponse = await http.post<InitiateUploadResponse>({
    url: Endpoints.CHANNEL_CHUNKED_UPLOADS(channelId),
    body: {
      filename: file.name,
      file_size: file.size,
    },
    signal,
    rejectWithError: true,
  });
  const {upload_id, chunk_size, chunk_count} = initiateResponse.body;
  logger.debug(`Initiated chunked upload: ${upload_id}, ${chunk_count} chunks of ${chunk_size} bytes`);

  // Per-chunk uploaded byte counts (set all-at-once when a chunk lands) and
  // the etags the completion endpoint needs, ordered by chunk index.
  const chunkProgress = new Array<number>(chunk_count).fill(0);
  const etags = new Array<{chunk_index: number; etag: string}>(chunk_count);

  function reportProgress() {
    if (!onProgress) return;
    const loaded = chunkProgress.reduce((sum, bytes) => sum + bytes, 0);
    onProgress(loaded, file.size);
  }

  // Uploads a single chunk, retrying retryable failures (>=500 / 429) up to
  // MAX_CHUNK_RETRIES times with exponential backoff.
  async function uploadOneChunk(chunkIndex: number): Promise<void> {
    const start = chunkIndex * chunk_size;
    const end = Math.min(start + chunk_size, file.size);
    const chunkBlob = file.slice(start, end);
    const chunkData = new Uint8Array(await chunkBlob.arrayBuffer());
    const chunkLength = chunkData.byteLength;
    let lastError: unknown;
    for (let attempt = 0; attempt <= MAX_CHUNK_RETRIES; attempt++) {
      if (signal?.aborted) {
        throw new DOMException('Upload cancelled', 'AbortError');
      }
      try {
        const response = await http.put<UploadChunkResponse>({
          url: Endpoints.CHANNEL_CHUNKED_UPLOAD_CHUNK(channelId, upload_id, chunkIndex),
          body: chunkData,
          headers: {'Content-Type': 'application/octet-stream'},
          signal,
          rejectWithError: true,
        });
        etags[chunkIndex] = {chunk_index: chunkIndex, etag: response.body.etag};
        chunkProgress[chunkIndex] = chunkLength;
        reportProgress();
        return;
      } catch (error) {
        lastError = error;
        // An abort is never retried; surface it immediately.
        if (signal?.aborted) {
          throw error;
        }
        // Only retry transient failures: server errors and rate limits.
        const isRetryable =
          error instanceof Error &&
          'status' in error &&
          ((error as {status: number}).status >= 500 || (error as {status: number}).status === 429);
        if (!isRetryable || attempt === MAX_CHUNK_RETRIES) {
          throw error;
        }
        const delay = RETRY_BASE_DELAY_MS * 2 ** attempt;
        logger.debug(
          `Chunk ${chunkIndex} failed (attempt ${attempt + 1}/${MAX_CHUNK_RETRIES + 1}), retrying in ${delay}ms`,
        );
        await new Promise((resolve) => setTimeout(resolve, delay));
      }
    }
    throw lastError;
  }

  // Bounded-concurrency pool over all chunk indices. The first chunk error
  // rejects the pool; chunks already in flight are only stopped via `signal`.
  //
  // FIX: guard against chunk_count === 0 — the previous implementation only
  // settled the pool promise from task completion callbacks, so a zero-chunk
  // session (e.g. an empty file) scheduled no tasks and awaited a promise
  // that could never resolve, hanging forever.
  if (chunk_count > 0) {
    const chunkIndices = Array.from({length: chunk_count}, (_, i) => i);
    let cursor = 0;
    const activeTasks: Array<Promise<void>> = [];
    await new Promise<void>((resolve, reject) => {
      let settled = false;
      // First error (or full completion) wins; later settles are ignored.
      function settle(error?: unknown) {
        if (settled) return;
        settled = true;
        if (error) {
          reject(error);
        } else {
          resolve();
        }
      }
      function scheduleNext() {
        while (activeTasks.length < MAX_CONCURRENT_CHUNKS && cursor < chunkIndices.length) {
          const chunkIndex = chunkIndices[cursor++];
          const task = uploadOneChunk(chunkIndex).then(
            () => {
              const idx = activeTasks.indexOf(task);
              if (idx !== -1) activeTasks.splice(idx, 1);
              if (cursor >= chunkIndices.length && activeTasks.length === 0) {
                settle();
              } else {
                scheduleNext();
              }
            },
            (error) => {
              settle(error);
            },
          );
          activeTasks.push(task);
        }
      }
      scheduleNext();
    });
  }

  logger.debug(`All ${chunk_count} chunks uploaded, completing upload`);
  const completeResponse = await http.post<CompleteUploadResponse>({
    url: Endpoints.CHANNEL_CHUNKED_UPLOAD_COMPLETE(channelId, upload_id),
    body: {etags},
    signal,
    rejectWithError: true,
  });
  return {
    upload_filename: completeResponse.body.upload_filename,
    file_size: completeResponse.body.file_size,
    content_type: completeResponse.body.content_type,
  };
}
/**
 * Decides whether a file must go through the chunked upload flow.
 * Anything larger than a single chunk is chunked; smaller files ride along
 * inline in the multipart message request.
 */
export function shouldUseChunkedUpload(file: File): boolean {
  const exceedsSingleChunk = file.size > CHUNKED_UPLOAD_CHUNK_SIZE;
  return exceedsSingleChunk;
}

View File

@@ -48,7 +48,6 @@ export interface CloudAttachment {
duration?: number | null;
waveform?: string | null;
isVoiceMessage?: boolean;
uploadedFilename?: string;
}
export interface MessageUpload {

View File

@@ -32,7 +32,6 @@ import {NSFWContentRejectedModal} from '@app/components/alerts/NSFWContentReject
import {SlowmodeRateLimitedModal} from '@app/components/alerts/SlowmodeRateLimitedModal';
import {Endpoints} from '@app/Endpoints';
import i18n from '@app/I18n';
import {shouldUseChunkedUpload, uploadFileChunked} from '@app/lib/ChunkedUploadService';
import {CloudUpload} from '@app/lib/CloudUpload';
import http, {type HttpResponse} from '@app/lib/HttpClient';
import type {HttpError} from '@app/lib/HttpError';
@@ -225,32 +224,6 @@ class MessageQueue extends Queue<MessageQueuePayload, HttpResponse<Message> | un
files = result.files;
}
if (hasAttachments && files?.length && attachments?.length) {
const abortController = new AbortController();
this.abortControllers.set(nonce, abortController);
try {
const chunkedResult = await this.performChunkedUploads(
channelId,
nonce,
files,
attachments,
abortController.signal,
);
files = chunkedResult.files;
attachments = chunkedResult.attachments;
} catch (error) {
this.abortControllers.delete(nonce);
const httpError = error as HttpError;
logger.error(`Chunked upload failed for channel ${channelId}:`, error);
this.handleSendError(channelId, nonce, httpError, i18n, payload.hasAttachments);
completed(null, undefined, error);
return;
}
this.abortControllers.delete(nonce);
}
const requestBody = buildMessageCreateRequest({
content: payload.content,
nonce,
@@ -321,77 +294,6 @@ class MessageQueue extends Queue<MessageQueuePayload, HttpResponse<Message> | un
}
}
private async performChunkedUploads(
channelId: string,
nonce: string,
files: Array<File>,
attachments: Array<ApiAttachmentMetadata>,
signal: AbortSignal,
): Promise<{files: Array<File>; attachments: Array<ApiAttachmentMetadata>}> {
const largeFileIndices = new Set<number>();
for (let i = 0; i < files.length; i++) {
if (shouldUseChunkedUpload(files[i])) {
largeFileIndices.add(i);
}
}
if (largeFileIndices.size === 0) {
return {files, attachments};
}
const totalChunkedSize = Array.from(largeFileIndices).reduce((sum, i) => sum + files[i].size, 0);
const totalOverallSize = files.reduce((sum, f) => sum + f.size, 0);
const chunkedRatio = totalOverallSize > 0 ? totalChunkedSize / totalOverallSize : 0;
const chunkedProgressWeight = chunkedRatio * 90;
const perFileProgress = new Map<number, number>();
for (const i of largeFileIndices) {
perFileProgress.set(i, 0);
}
const updatedAttachments = [...attachments];
await Promise.all(
Array.from(largeFileIndices).map(async (fileIndex) => {
const file = files[fileIndex];
const result = await uploadFileChunked(
channelId,
file,
(loaded, _total) => {
perFileProgress.set(fileIndex, loaded);
const totalLoaded = Array.from(perFileProgress.values()).reduce((s, v) => s + v, 0);
const ratio = totalChunkedSize > 0 ? totalLoaded / totalChunkedSize : 0;
const overallProgress = ratio * chunkedProgressWeight;
CloudUpload.updateSendingProgress(nonce, overallProgress);
},
signal,
);
if (updatedAttachments[fileIndex]) {
updatedAttachments[fileIndex] = {
...updatedAttachments[fileIndex],
uploaded_filename: result.upload_filename,
};
}
}),
);
const inlineFiles: Array<File> = [];
let inlineIndex = 0;
const remappedAttachments = updatedAttachments.map((att, originalIndex) => {
if (largeFileIndices.has(originalIndex)) {
return att;
}
const newId = String(inlineIndex);
inlineFiles.push(files[originalIndex]);
inlineIndex++;
return {...att, id: newId};
});
return {files: inlineFiles, attachments: remappedAttachments};
}
private async sendMultipartMessage(
channelId: string,
requestBody: MessageCreateRequest,

View File

@@ -39,7 +39,7 @@ export async function prepareAttachmentsForNonce(
throw new Error('No message upload found');
}
const inlineAttachments = messageUpload.attachments.filter((att) => !att.uploadedFilename);
const inlineAttachments = messageUpload.attachments;
const files = inlineAttachments.map((att) => att.file);
const attachments = favoriteMemeId ? undefined : mapMessageUploadAttachments(messageUpload.attachments);
@@ -55,6 +55,5 @@ export function mapMessageUploadAttachments(attachments: Array<CloudAttachment>)
flags: att.flags,
duration: att.duration != null ? Math.ceil(att.duration) : undefined,
waveform: att.waveform ?? undefined,
uploaded_filename: att.uploadedFilename,
}));
}

View File

@@ -34,7 +34,6 @@ export interface ApiAttachmentMetadata {
flags?: number;
duration?: number;
waveform?: string;
uploaded_filename?: string;
}
export interface MessageCreateRequest {