initial commit

This commit is contained in:
Hampus Kraft
2026-01-01 20:42:59 +00:00
commit 2f557eda8c
9029 changed files with 1490197 additions and 0 deletions

View File

@@ -0,0 +1,177 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import assert from 'node:assert/strict';
import fs from 'node:fs/promises';
import {Stream} from 'node:stream';
import {HeadObjectCommand} from '@aws-sdk/client-s3';
import type {Context} from 'hono';
import {HTTPException} from 'hono/http-exception';
import sharp from 'sharp';
import {temporaryFile} from 'tempy';
import * as v from 'valibot';
import {Config} from '~/Config';
import {Logger} from '~/Logger';
import {toBodyData, toWebReadableStream} from '~/lib/BinaryUtils';
import {createThumbnail} from '~/lib/FFmpegUtils';
import {parseRange, setHeaders} from '~/lib/HttpUtils';
import {processImage} from '~/lib/ImageProcessing';
import type {InMemoryCoalescer} from '~/lib/InMemoryCoalescer';
import type {HonoEnv} from '~/lib/MediaTypes';
import {SUPPORTED_MIME_TYPES} from '~/lib/MediaTypes';
import {validateMedia} from '~/lib/MediaValidation';
import {getMediaCategory, getMimeType} from '~/lib/MimeTypeUtils';
import {readS3Object, s3Client, streamS3Object} from '~/lib/S3Utils';
import {ExternalQuerySchema} from '~/schemas/ValidationSchemas';
/**
 * Attachment route: serves `attachments/{channel_id}/{attachment_id}/{filename}`
 * from the CDN bucket.
 *
 * Untransformed requests are streamed straight from S3 with Range support and a
 * Content-Disposition download header. Transformed requests (resize / format /
 * quality / animated) are buffered, validated, and processed in memory;
 * concurrent identical requests are coalesced so the S3 read and transform
 * happen only once.
 */
export const createAttachmentsHandler = (coalescer: InMemoryCoalescer) => {
	return async (ctx: Context<HonoEnv>): Promise<Response> => {
		const {channel_id, attachment_id, filename} = ctx.req.param();
		const {width, height, format, quality, animated} = v.parse(ExternalQuerySchema, ctx.req.query());
		// FIX: the S3 key must end with the requested filename; a mangled template
		// previously produced a literal "$(unknown)" path segment.
		const key = `attachments/${channel_id}/${attachment_id}/${filename}`;
		const hasTransformations = Boolean(width || height || format || quality !== 'lossless' || animated);
		// FIX: the original guard `(isStreamableMedia && !hasTransformations) || (!width &&
		// !height && !format && quality === 'lossless' && !animated)` reduces exactly to
		// `!hasTransformations`, so the streamable-extension regex was dead code and is removed.
		if (!hasTransformations) {
			try {
				// HEAD first: Range parsing and response headers need the total object size.
				const headCommand = new HeadObjectCommand({
					Bucket: Config.AWS_S3_BUCKET_CDN,
					Key: key,
				});
				const headResponse = await s3Client.send(headCommand);
				const totalSize = headResponse.ContentLength || 0;
				const range = parseRange(ctx.req.header('Range') ?? '', totalSize);
				let streamData: Stream;
				let contentType: string;
				let lastModified: Date | undefined;
				if (range) {
					const result = await readS3Object(Config.AWS_S3_BUCKET_CDN, key, range);
					assert(result.data instanceof Stream, 'Expected range request to return a stream');
					streamData = result.data;
					contentType = result.contentType;
					lastModified = result.lastModified;
				} else {
					const result = await streamS3Object(Config.AWS_S3_BUCKET_CDN, key);
					streamData = result.stream;
					contentType = result.contentType;
					lastModified = result.lastModified;
				}
				setHeaders(ctx, totalSize, contentType, range, lastModified);
				ctx.header('Content-Disposition', `attachment; filename="${encodeURIComponent(filename)}"`);
				return new Response(toWebReadableStream(streamData), {
					status: range ? 206 : 200,
					headers: Object.fromEntries(ctx.res.headers),
				});
			} catch (error) {
				Logger.error({error}, 'Failed to process attachment media');
				throw new HTTPException(400);
			}
		}
		// Transformation path: identical concurrent requests share one unit of work.
		const cacheKey = `${key}_${width}_${height}_${format}_${quality}_${animated}`;
		const result = await coalescer.coalesce(cacheKey, async () => {
			try {
				const {data, contentType: originalContentType} = await readS3Object(Config.AWS_S3_BUCKET_CDN, key);
				assert(data instanceof Buffer);
				// Prefer content sniffing over the stored content type.
				const mimeType = getMimeType(data, filename) || originalContentType;
				if (mimeType && SUPPORTED_MIME_TYPES.has(mimeType)) {
					await validateMedia(data, filename, ctx);
				}
				const mediaType = getMediaCategory(mimeType);
				if (!mediaType) throw new HTTPException(400, {message: 'Invalid media type'});
				if (mediaType === 'image') {
					// Clamp the requested dimensions to the source so we never upscale.
					const metadata = await sharp(data).metadata();
					const targetWidth = width ? Math.min(width, metadata.width || 0) : metadata.width || 0;
					const targetHeight = height ? Math.min(height, metadata.height || 0) : metadata.height || 0;
					const image = await processImage({
						buffer: data,
						width: targetWidth,
						height: targetHeight,
						format: format || metadata.format || '',
						quality,
						// Only GIF/WebP sources can stay animated, and only when requested.
						animated: (mimeType.endsWith('gif') || mimeType.endsWith('webp')) && animated,
					});
					const finalContentType = format ? getMimeType(Buffer.from(''), `image.${format}`) : originalContentType;
					return {data: image, contentType: finalContentType || 'application/octet-stream'};
				}
				if (mediaType === 'video' && format) {
					// Videos: extract a thumbnail frame via ffmpeg, then transform it as an image.
					// Temp files are registered on the context for cleanup by middleware.
					const ext = mimeType.split('/')[1];
					const tempPath = temporaryFile({extension: ext});
					ctx.get('tempFiles').push(tempPath);
					await fs.writeFile(tempPath, data);
					const thumbnailPath = await createThumbnail(tempPath);
					ctx.get('tempFiles').push(thumbnailPath);
					const thumbnailData = await fs.readFile(thumbnailPath);
					const thumbMeta = await sharp(thumbnailData).metadata();
					const targetWidth = width ? Math.min(width, thumbMeta.width || 0) : thumbMeta.width || 0;
					const targetHeight = height ? Math.min(height, thumbMeta.height || 0) : thumbMeta.height || 0;
					const processedThumbnail = await processImage({
						buffer: thumbnailData,
						width: targetWidth,
						height: targetHeight,
						format,
						quality,
						animated: false,
					});
					const contentType = getMimeType(Buffer.from(''), `image.${format}`);
					if (!contentType) throw new HTTPException(400, {message: 'Unsupported image format'});
					return {data: processedThumbnail, contentType};
				}
				throw new HTTPException(400, {message: 'Only images can be transformed via this endpoint'});
			} catch (error) {
				Logger.error({error}, 'Failed to process attachment media');
				throw new HTTPException(400);
			}
		});
		const range = parseRange(ctx.req.header('Range') ?? '', result.data.length);
		setHeaders(ctx, result.data.length, result.contentType, range);
		// Downloads of transcoded media get a filename matching the new extension.
		const downloadFilename = format ? filename.replace(/\.[^.]+$/, `.${format}`) : filename;
		ctx.header('Content-Disposition', `attachment; filename="${encodeURIComponent(downloadFilename)}"`);
		const fileData = range ? result.data.subarray(range.start, range.end + 1) : result.data;
		return ctx.body(toBodyData(fileData));
	};
};

View File

@@ -0,0 +1,175 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import fs from 'node:fs/promises';
import type {Context} from 'hono';
import {HTTPException} from 'hono/http-exception';
import sharp from 'sharp';
import {temporaryFile} from 'tempy';
import * as v from 'valibot';
import {Config} from '~/Config';
import {Logger} from '~/Logger';
import {toBodyData} from '~/lib/BinaryUtils';
import {createThumbnail} from '~/lib/FFmpegUtils';
import {parseRange, setHeaders} from '~/lib/HttpUtils';
import {processImage} from '~/lib/ImageProcessing';
import type {InMemoryCoalescer} from '~/lib/InMemoryCoalescer';
import type {ErrorType, HonoEnv} from '~/lib/MediaTypes';
import {validateMedia} from '~/lib/MediaValidation';
import * as metrics from '~/lib/MetricsClient';
import {generateFilename, getMediaCategory, getMimeType} from '~/lib/MimeTypeUtils';
import {streamToBuffer} from '~/lib/S3Utils';
import {ExternalQuerySchema} from '~/schemas/ValidationSchemas';
import * as FetchUtils from '~/utils/FetchUtils';
import * as MediaProxyUtils from '~/utils/MediaProxyUtils';
// Bucket an upstream HTTP status into a coarse error type for metrics reporting.
const getErrorTypeFromUpstreamStatus = (status: number): ErrorType => {
	if (status >= 500) return 'upstream_5xx';
	switch (status) {
		case 404:
			return 'not_found';
		case 403:
			return 'forbidden';
		case 401:
			return 'unauthorized';
		default:
			return 'other';
	}
};
/**
 * Fetch a remote URL and validate the payload as media.
 *
 * On a non-200 upstream response, emits an upstream-error metric and stashes
 * error context on the request context before failing. Returns the raw bytes,
 * the detected MIME type, and an effective filename (generated from the MIME
 * type when the URL path carries no extension).
 *
 * Throws HTTPException(400) for unsupported formats and for "expected" network
 * failures; unexpected errors are rethrown unchanged.
 */
const fetchAndValidate = async (
	url: string,
	ctx: Context<HonoEnv>,
): Promise<{buffer: Buffer; mimeType: string; filename: string}> => {
	try {
		const response = await FetchUtils.sendRequest({url});
		if (response.status !== 200) {
			// Record the upstream failure class for metrics before bailing out.
			const errorType = getErrorTypeFromUpstreamStatus(response.status);
			metrics.counter({
				name: 'media_proxy.external.upstream_error',
				dimensions: {status: String(response.status), error_type: errorType},
			});
			ctx.set('metricsErrorContext', {errorType, errorSource: 'upstream'});
			throw new Error(`Failed to fetch media: ${response.status}`);
		}
		const buffer = await streamToBuffer(response.stream);
		// Derive a filename from the last URL path segment.
		const urlObj = new URL(url);
		const filename = urlObj.pathname.substring(urlObj.pathname.lastIndexOf('/') + 1);
		const mimeType = getMimeType(buffer, filename);
		if (!mimeType) throw new HTTPException(400, {message: 'Unsupported file format'});
		// Extension-less paths get a generated filename based on the detected MIME type.
		const effectiveFilename = filename?.includes('.') ? filename : generateFilename(mimeType, filename);
		await validateMedia(buffer, effectiveFilename, ctx);
		return {buffer, mimeType, filename: effectiveFilename};
	} catch (error) {
		if (error instanceof HTTPException) throw error;
		// "Expected" fetch failures (flagged by FetchUtils) become client-visible 400s.
		if (error instanceof Error && 'isExpected' in error && error.isExpected) {
			const httpError = error as Error & {errorType?: ErrorType};
			if (httpError.errorType) {
				ctx.set('metricsErrorContext', {errorType: httpError.errorType, errorSource: 'network'});
			}
			throw new HTTPException(400, {message: `Unable to fetch media: ${error.message}`});
		}
		throw error;
	}
};
/**
 * External media proxy route.
 *
 * The path is `{signature}/{proxied-url-path}`; the signature is verified
 * against SECRET_KEY before any fetch happens. The remote media is fetched,
 * validated, optionally transformed (images resized/re-encoded, videos reduced
 * to a transcoded thumbnail when a format is requested), and served with Range
 * support. Concurrent identical requests are coalesced.
 */
export const createExternalMediaHandler = (coalescer: InMemoryCoalescer) => {
	return async (ctx: Context<HonoEnv>, path: string): Promise<Response> => {
		const {width, height, format, quality, animated} = v.parse(ExternalQuerySchema, ctx.req.query());
		const parts = path.split('/');
		const signature = parts[0];
		const proxyUrlPath = parts.slice(1).join('/');
		if (!signature || !proxyUrlPath) throw new HTTPException(400);
		// Reject unsigned/forged proxy URLs before touching the network.
		if (!MediaProxyUtils.verifySignature(proxyUrlPath, signature, Config.SECRET_KEY)) {
			throw new HTTPException(401);
		}
		const cacheKey = `${proxyUrlPath}_${signature}_${width}_${height}_${format}_${quality}_${animated}`;
		const result = await coalescer.coalesce(cacheKey, async () => {
			try {
				const actualUrl = MediaProxyUtils.reconstructOriginalURL(proxyUrlPath);
				const {buffer, mimeType} = await fetchAndValidate(actualUrl, ctx);
				const mediaType = getMediaCategory(mimeType);
				if (!mediaType) throw new HTTPException(400, {message: 'Invalid media type'});
				if (mediaType === 'image') {
					// Clamp requested dimensions to the source so we never upscale.
					const metadata = await sharp(buffer).metadata();
					const targetWidth = width ? Math.min(width, metadata.width || 0) : metadata.width || 0;
					const targetHeight = height ? Math.min(height, metadata.height || 0) : metadata.height || 0;
					const image = await processImage({
						buffer,
						width: targetWidth,
						height: targetHeight,
						format: format || metadata.format || '',
						quality,
						// Only GIF/WebP sources can stay animated, and only when requested.
						animated: (mimeType.endsWith('gif') || mimeType.endsWith('webp')) && animated,
					});
					const contentType = format ? getMimeType(Buffer.from(''), `image.${format}`) : mimeType;
					return {data: image, contentType: contentType || 'application/octet-stream'};
				}
				if (mediaType === 'video' && format) {
					// Videos: extract a thumbnail frame via ffmpeg, then transform it as an image.
					// Temp files are registered on the context for cleanup by middleware.
					const ext = mimeType.split('/')[1];
					const tempPath = temporaryFile({extension: ext});
					ctx.get('tempFiles').push(tempPath);
					await fs.writeFile(tempPath, buffer);
					const thumbnailPath = await createThumbnail(tempPath);
					ctx.get('tempFiles').push(thumbnailPath);
					const thumbnailData = await fs.readFile(thumbnailPath);
					const thumbMeta = await sharp(thumbnailData).metadata();
					const targetWidth = width ? Math.min(width, thumbMeta.width || 0) : thumbMeta.width || 0;
					const targetHeight = height ? Math.min(height, thumbMeta.height || 0) : thumbMeta.height || 0;
					const processedThumbnail = await processImage({
						buffer: thumbnailData,
						width: targetWidth,
						height: targetHeight,
						format,
						quality,
						animated: false,
					});
					const contentType = getMimeType(Buffer.from(''), `image.${format}`);
					if (!contentType) throw new HTTPException(400, {message: 'Unsupported image format'});
					return {data: processedThumbnail, contentType};
				}
				// No transformation requested/possible: pass the media through unchanged.
				return {data: buffer, contentType: mimeType};
			} catch (error) {
				if (error instanceof HTTPException) throw error;
				Logger.error({error}, 'Failed to process external media');
				throw new HTTPException(400, {message: 'Failed to process media'});
			}
		});
		const range = parseRange(ctx.req.header('Range') ?? '', result.data.length);
		setHeaders(ctx, result.data.length, result.contentType, range);
		const fileData = range ? result.data.subarray(range.start, range.end + 1) : result.data;
		return ctx.body(toBodyData(fileData));
	};
};

View File

@@ -0,0 +1,187 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import assert from 'node:assert/strict';
import type {Context} from 'hono';
import {HTTPException} from 'hono/http-exception';
import sharp from 'sharp';
import * as v from 'valibot';
import {Config} from '~/Config';
import {toBodyData} from '~/lib/BinaryUtils';
import {parseRange, setHeaders} from '~/lib/HttpUtils';
import {processImage} from '~/lib/ImageProcessing';
import type {InMemoryCoalescer} from '~/lib/InMemoryCoalescer';
import type {HonoEnv} from '~/lib/MediaTypes';
import {MEDIA_TYPES} from '~/lib/MediaTypes';
import {getMimeType} from '~/lib/MimeTypeUtils';
import {readS3Object} from '~/lib/S3Utils';
import {ImageParamSchema, ImageQuerySchema} from '~/schemas/ValidationSchemas';
// Drop the `a_` animation marker from an asset hash, if present.
const stripAnimationPrefix = (hash: string): string => {
	if (!hash.startsWith('a_')) return hash;
	return hash.slice(2);
};
const processImageRequest = async (params: {
coalescer: InMemoryCoalescer;
ctx: Context<HonoEnv>;
cacheKey: string;
s3Key: string;
ext: string;
aspectRatio: number;
size: string;
quality: string;
animated: boolean;
}): Promise<Response> => {
const {coalescer, ctx, cacheKey, s3Key, ext, aspectRatio, size, quality, animated} = params;
const result = await coalescer.coalesce(cacheKey, async () => {
const {data} = await readS3Object(Config.AWS_S3_BUCKET_CDN, s3Key);
assert(data instanceof Buffer);
const metadata = await sharp(data).metadata();
const requestedWidth = Number(size);
const originalAspectRatio = (metadata.width || 1) / (metadata.height || 1);
const effectiveAspectRatio = aspectRatio === 0 ? originalAspectRatio : aspectRatio;
const requestedHeight = Math.floor(requestedWidth / effectiveAspectRatio);
const width = Math.min(requestedWidth, metadata.width || 0);
const height = Math.min(requestedHeight, metadata.height || 0);
const image = await processImage({
buffer: data,
width,
height,
format: ext,
quality,
animated: ext === 'gif' || (ext === 'webp' && animated),
});
const mimeType = getMimeType(Buffer.from(''), `image.${ext}`) || 'application/octet-stream';
return {data: image, contentType: mimeType};
});
const range = parseRange(ctx.req.header('Range') ?? '', result.data.length);
setHeaders(ctx, result.data.length, result.contentType, range);
const fileData = range ? result.data.subarray(range.start, range.end + 1) : result.data;
return ctx.body(toBodyData(fileData));
};
/** Generic `{pathPrefix}/{id}/{hash}.{ext}` image route handler factory. */
export const createImageRouteHandler = (coalescer: InMemoryCoalescer) => {
	return async (ctx: Context<HonoEnv>, pathPrefix: string, aspectRatio = 0): Promise<Response> => {
		const {id, filename} = v.parse(ImageParamSchema, ctx.req.param());
		const {size, quality, animated} = v.parse(ImageQuerySchema, ctx.req.query());
		const pieces = filename.split('.');
		// Expect exactly "<hash>.<ext>" with a recognized image extension.
		if (pieces.length !== 2 || !MEDIA_TYPES.IMAGE.extensions.includes(pieces[1])) {
			throw new HTTPException(400);
		}
		const [hash, ext] = pieces;
		// The `a_` animation marker is part of the public hash but not of the S3 key.
		const storedHash = stripAnimationPrefix(hash);
		return processImageRequest({
			coalescer,
			ctx,
			cacheKey: `${pathPrefix}_${id}_${hash}_${ext}_${size}_${quality}_${aspectRatio}_${animated}`,
			s3Key: `${pathPrefix}/${id}/${storedHash}`,
			ext,
			aspectRatio,
			size,
			quality,
			animated,
		});
	};
};
/** Per-guild-member image route: keys under `guilds/{guild_id}/users/{user_id}/…`. */
export const createGuildMemberImageRouteHandler = (coalescer: InMemoryCoalescer) => {
	return async (ctx: Context<HonoEnv>, pathPrefix: string, aspectRatio = 0): Promise<Response> => {
		const {guild_id, user_id, filename} = ctx.req.param();
		const {size, quality, animated} = v.parse(ImageQuerySchema, ctx.req.query());
		const pieces = filename.split('.');
		// Expect exactly "<hash>.<ext>" with a recognized image extension.
		if (pieces.length !== 2 || !MEDIA_TYPES.IMAGE.extensions.includes(pieces[1])) {
			throw new HTTPException(400);
		}
		const [hash, ext] = pieces;
		// The `a_` animation marker is part of the public hash but not of the S3 key.
		const storedHash = stripAnimationPrefix(hash);
		return processImageRequest({
			coalescer,
			ctx,
			cacheKey: `${pathPrefix}_${guild_id}_${user_id}_${hash}_${ext}_${size}_${quality}_${aspectRatio}_${animated}`,
			s3Key: `guilds/${guild_id}/users/${user_id}/${pathPrefix}/${storedHash}`,
			ext,
			aspectRatio,
			size,
			quality,
			animated,
		});
	};
};
/**
 * Like processImageRequest, but runs the sharp resize/encode inline instead of
 * delegating to processImage: fit-contain with a transparent background and a
 * quality derived from the quality tier. Serves the coalesced result with
 * Range support.
 */
const processSimpleImageRequest = async (params: {
	coalescer: InMemoryCoalescer;
	ctx: Context<HonoEnv>;
	cacheKey: string;
	s3Key: string;
	ext: string;
	aspectRatio: number;
	size: string;
	quality: string;
	animated: boolean;
}): Promise<Response> => {
	const {coalescer, ctx, cacheKey, s3Key, ext, aspectRatio, size, quality, animated} = params;
	const result = await coalescer.coalesce(cacheKey, async () => {
		const {data} = await readS3Object(Config.AWS_S3_BUCKET_CDN, s3Key);
		assert(data instanceof Buffer);
		const metadata = await sharp(data).metadata();
		const requestedWidth = Number(size);
		// aspectRatio === 0 means "keep the source's own aspect ratio".
		const originalAspectRatio = (metadata.width || 1) / (metadata.height || 1);
		const effectiveAspectRatio = aspectRatio === 0 ? originalAspectRatio : aspectRatio;
		const requestedHeight = Math.floor(requestedWidth / effectiveAspectRatio);
		// Clamp to the source dimensions so we never upscale.
		const width = Math.min(requestedWidth, metadata.width || 0);
		const height = Math.min(requestedHeight, metadata.height || 0);
		// GIFs always keep animation; WebP only when the caller asked for it.
		const shouldAnimate = ext === 'gif' ? true : ext === 'webp' && animated;
		const image = await sharp(data, {animated: shouldAnimate})
			.resize(width, height, {
				fit: 'contain',
				background: {r: 255, g: 255, b: 255, alpha: 0},
				withoutEnlargement: true,
			})
			.toFormat(ext as keyof sharp.FormatEnum, {
				quality: quality === 'high' ? 80 : quality === 'low' ? 20 : 100,
			})
			.toBuffer();
		const mimeType = getMimeType(Buffer.from(''), `image.${ext}`) || 'application/octet-stream';
		return {data: image, contentType: mimeType};
	});
	const range = parseRange(ctx.req.header('Range') ?? '', result.data.length);
	setHeaders(ctx, result.data.length, result.contentType, range);
	const fileData = range ? result.data.subarray(range.start, range.end + 1) : result.data;
	return ctx.body(toBodyData(fileData));
};
/**
 * Simple image route: the `id` param is "<filename>.<ext>" and maps directly
 * to `{pathPrefix}/{filename}` in the CDN bucket.
 */
export const createSimpleImageRouteHandler = (coalescer: InMemoryCoalescer) => {
	return async (ctx: Context<HonoEnv>, pathPrefix: string, aspectRatio = 0): Promise<Response> => {
		const {id} = ctx.req.param();
		const {size, quality, animated} = v.parse(ImageQuerySchema, ctx.req.query());
		const parts = id.split('.');
		if (parts.length !== 2 || !MEDIA_TYPES.IMAGE.extensions.includes(parts[1])) {
			throw new HTTPException(400);
		}
		const [filename, ext] = parts;
		// FIX: interpolate the parsed filename — the templates previously contained a
		// mangled literal "$(unknown)" where `${filename}` belongs (the destructured
		// `filename` was otherwise unused, confirming the intent).
		const cacheKey = `${pathPrefix}_${filename}_${ext}_${size}_${quality}_${aspectRatio}_${animated}`;
		const s3Key = `${pathPrefix}/${filename}`;
		return processSimpleImageRequest({coalescer, ctx, cacheKey, s3Key, ext, aspectRatio, size, quality, animated});
	};
};

View File

@@ -0,0 +1,167 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import assert from 'node:assert/strict';
import crypto from 'node:crypto';
import fs from 'node:fs/promises';
import type {Context} from 'hono';
import {HTTPException} from 'hono/http-exception';
import sharp from 'sharp';
import {temporaryFile} from 'tempy';
import {Config} from '~/Config';
import {Logger} from '~/Logger';
import {createThumbnail} from '~/lib/FFmpegUtils';
import type {InMemoryCoalescer} from '~/lib/InMemoryCoalescer';
import type {HonoEnv} from '~/lib/MediaTypes';
import {processMetadata, validateMedia} from '~/lib/MediaValidation';
import {generateFilename, getMimeType} from '~/lib/MimeTypeUtils';
import type {NSFWDetectionService} from '~/lib/NSFWDetectionService';
import {readS3Object, streamToBuffer} from '~/lib/S3Utils';
import * as FetchUtils from '~/utils/FetchUtils';
// Request body for the metadata endpoint. The media source is selected by the
// `type` discriminant; `with_base64` asks for the raw bytes to be echoed back
// base64-encoded (only present on the `external` and `s3` variants).
type MediaProxyMetadataRequest =
	| {type: 'external'; url: string; with_base64?: boolean; isNSFWAllowed: boolean}
	| {type: 'upload'; upload_filename: string; isNSFWAllowed: boolean}
	| {type: 'base64'; base64: string; isNSFWAllowed: boolean}
	| {type: 's3'; bucket: string; key: string; with_base64?: boolean; isNSFWAllowed: boolean};
/**
 * Metadata endpoint: accepts media from one of four sources (raw base64, a
 * prior upload, an arbitrary S3 object, or an external URL), validates it, and
 * returns its metadata plus a SHA-256 content hash and NSFW-detection results.
 * Identical requests are coalesced on a source-derived cache key.
 */
export const handleMetadataRequest = (coalescer: InMemoryCoalescer, nsfwDetectionService: NSFWDetectionService) => {
	return async (ctx: Context<HonoEnv>) => {
		const request = await ctx.req.json<MediaProxyMetadataRequest>();
		// The cache key uniquely identifies the source (and whether base64 echo was requested).
		const cacheKey = (() => {
			switch (request.type) {
				case 'base64':
					return `base64_${request.base64}`;
				case 'upload':
					return `upload_${request.upload_filename}`;
				case 'external':
					return `external_${request.url}_${request.with_base64}`;
				case 's3':
					return `s3_${request.bucket}_${request.key}_${request.with_base64}`;
			}
		})();
		return coalescer.coalesce(cacheKey, async () => {
			// Materialize the media bytes and, when derivable, a filename.
			const {buffer, filename} = await (async () => {
				switch (request.type) {
					case 'base64':
						return {buffer: Buffer.from(request.base64!, 'base64'), filename: undefined};
					case 'upload': {
						const {data} = await readS3Object(Config.AWS_S3_BUCKET_UPLOADS, request.upload_filename!);
						assert(data instanceof Buffer);
						return {buffer: data, filename: request.upload_filename};
					}
					case 's3': {
						const {data} = await readS3Object(request.bucket!, request.key!);
						assert(data instanceof Buffer);
						// Use the last key segment as the filename.
						const filename = request.key!.substring(request.key!.lastIndexOf('/') + 1);
						return {buffer: data, filename: filename || undefined};
					}
					case 'external': {
						try {
							const response = await FetchUtils.sendRequest({url: request.url!});
							if (response.status !== 200) throw new HTTPException(400, {message: 'Failed to fetch media'});
							const url = new URL(request.url!);
							const filename = url.pathname.substring(url.pathname.lastIndexOf('/') + 1);
							return {buffer: await streamToBuffer(response.stream), filename: filename || undefined};
						} catch (error) {
							// "Expected" fetch failures (flagged by FetchUtils) become client-visible 400s.
							if (error instanceof Error && 'isExpected' in error && error.isExpected) {
								throw new HTTPException(400, {message: `Unable to fetch media: ${error.message}`});
							}
							throw error;
						}
					}
					default:
						throw new HTTPException(400, {message: 'Invalid request type'});
				}
			})();
			// base64 payloads carry no filename; derive one from the detected format.
			let effectiveFilename = filename;
			if (!effectiveFilename && request.type === 'base64') {
				try {
					const detectedMime = getMimeType(buffer);
					if (detectedMime) {
						effectiveFilename = generateFilename(detectedMime);
					} else {
						// Fall back to sharp's format sniffing when getMimeType fails.
						const metadata = await sharp(buffer).metadata();
						if (metadata.format) effectiveFilename = `image.${metadata.format}`;
					}
				} catch (error) {
					Logger.error({error}, 'Failed to detect format of base64 data');
					throw new HTTPException(400, {message: 'Invalid or corrupt media data'});
				}
			}
			if (!effectiveFilename) {
				throw new HTTPException(400, {message: 'Cannot determine file type'});
			}
			const mimeType = await validateMedia(buffer, effectiveFilename, ctx);
			const metadata = await processMetadata(ctx, mimeType, buffer);
			const contentHash = crypto.createHash('sha256').update(buffer).digest('hex');
			let nsfw = false;
			let nsfwProbability = 0;
			let nsfwPredictions: Record<string, number> = {};
			// NSFW scanning is skipped entirely when the caller is allowed NSFW content.
			if (!request.isNSFWAllowed) {
				const isImageOrVideo = mimeType.startsWith('image/') || mimeType.startsWith('video/');
				if (isImageOrVideo) {
					try {
						let checkBuffer = buffer;
						if (mimeType.startsWith('video/')) {
							// Videos are scanned via a single extracted thumbnail frame;
							// temp files are registered on the context for later cleanup.
							const videoExtension = mimeType.split('/')[1] ?? 'tmp';
							const tempPath = temporaryFile({extension: videoExtension});
							ctx.get('tempFiles').push(tempPath);
							await fs.writeFile(tempPath, buffer);
							const thumbnailPath = await createThumbnail(tempPath);
							ctx.get('tempFiles').push(thumbnailPath);
							checkBuffer = await fs.readFile(thumbnailPath);
						}
						const nsfwResult = await nsfwDetectionService.checkNSFWBuffer(checkBuffer);
						nsfw = nsfwResult.isNSFW;
						nsfwProbability = nsfwResult.probability;
						nsfwPredictions = nsfwResult.predictions ?? {};
					} catch (error) {
						// Detection failures are logged but non-fatal; media defaults to non-NSFW.
						Logger.error({error}, 'Failed to perform NSFW detection');
					}
				}
			}
			return ctx.json({
				...metadata,
				content_type: mimeType,
				content_hash: contentHash,
				nsfw,
				nsfw_probability: nsfwProbability,
				nsfw_predictions: nsfwPredictions,
				base64:
					(request.type === 'external' || request.type === 's3') && request.with_base64
						? buffer.toString('base64')
						: undefined,
			});
		});
	};
};

View File

@@ -0,0 +1,47 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import assert from 'node:assert/strict';
import type {Context} from 'hono';
import {HTTPException} from 'hono/http-exception';
import {Config} from '~/Config';
import {toBodyData} from '~/lib/BinaryUtils';
import {setHeaders} from '~/lib/HttpUtils';
import type {HonoEnv} from '~/lib/MediaTypes';
import {readS3Object} from '~/lib/S3Utils';
/** Serve objects from the static S3 bucket, mapping the URL path to the object key. */
export const handleStaticProxyRequest = async (ctx: Context<HonoEnv>): Promise<Response> => {
	const staticBucket = Config.AWS_S3_BUCKET_STATIC;
	const requestPath = ctx.req.path;
	const notFound = () => ctx.text('Not Found', 404);
	// No bucket configured, or the bare root path: nothing to serve.
	if (!staticBucket || requestPath === '/') {
		return notFound();
	}
	// Object keys never carry the URL path's leading slash(es).
	const objectKey = requestPath.replace(/^\/+/, '');
	try {
		const {data, size, contentType, lastModified} = await readS3Object(staticBucket, objectKey);
		assert(Buffer.isBuffer(data));
		setHeaders(ctx, size, contentType, null, lastModified);
		return ctx.body(toBodyData(data));
	} catch (error) {
		if (error instanceof HTTPException) {
			throw error;
		}
		// Missing objects (and any other read failure) surface as a plain 404.
		return notFound();
	}
};

View File

@@ -0,0 +1,97 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import assert from 'node:assert/strict';
import type {Context} from 'hono';
import {HTTPException} from 'hono/http-exception';
import sharp from 'sharp';
import * as v from 'valibot';
import {Config} from '~/Config';
import {toBodyData} from '~/lib/BinaryUtils';
import {parseRange, setHeaders} from '~/lib/HttpUtils';
import type {InMemoryCoalescer} from '~/lib/InMemoryCoalescer';
import type {HonoEnv} from '~/lib/MediaTypes';
import {getMimeType} from '~/lib/MimeTypeUtils';
import {readS3Object} from '~/lib/S3Utils';
import {ImageQuerySchema} from '~/schemas/ValidationSchemas';
// Output extensions accepted for sticker requests.
const STICKER_EXTENSIONS = ['gif', 'webp'];
/**
 * Sticker pipeline: read the source object, resize to a square bounding box of
 * `size` (fit-contain, transparent background, never enlarged), encode to the
 * requested extension, and serve the coalesced result with Range support.
 */
const processStickerRequest = async (params: {
	coalescer: InMemoryCoalescer;
	ctx: Context<HonoEnv>;
	cacheKey: string;
	s3Key: string;
	ext: string;
	size: string;
	quality: string;
	animated: boolean;
}): Promise<Response> => {
	const {coalescer, ctx, cacheKey, s3Key, ext, size, quality, animated} = params;
	const result = await coalescer.coalesce(cacheKey, async () => {
		const {data} = await readS3Object(Config.AWS_S3_BUCKET_CDN, s3Key);
		assert(data instanceof Buffer);
		const metadata = await sharp(data).metadata();
		const requestedSize = Number(size);
		// Clamp each axis to the source dimensions so we never upscale.
		const width = Math.min(requestedSize, metadata.width || 0);
		const height = Math.min(requestedSize, metadata.height || 0);
		// GIFs always keep animation; WebP only when the caller asked for it.
		const shouldAnimate = ext === 'gif' ? true : ext === 'webp' && animated;
		const image = await sharp(data, {animated: shouldAnimate})
			.resize(width, height, {
				fit: 'contain',
				background: {r: 255, g: 255, b: 255, alpha: 0},
				withoutEnlargement: true,
			})
			.toFormat(ext as keyof sharp.FormatEnum, {
				quality: quality === 'high' ? 80 : quality === 'low' ? 20 : 100,
			})
			.toBuffer();
		const mimeType = getMimeType(Buffer.from(''), `image.${ext}`) || 'application/octet-stream';
		return {data: image, contentType: mimeType};
	});
	const range = parseRange(ctx.req.header('Range') ?? '', result.data.length);
	setHeaders(ctx, result.data.length, result.contentType, range);
	const fileData = range ? result.data.subarray(range.start, range.end + 1) : result.data;
	return ctx.body(toBodyData(fileData));
};
/**
 * Sticker route: the `id` param is "<filename>.<ext>" and maps to
 * `stickers/{filename}` in the CDN bucket.
 */
export const createStickerRouteHandler = (coalescer: InMemoryCoalescer) => {
	return async (ctx: Context<HonoEnv>): Promise<Response> => {
		const {id} = ctx.req.param();
		const {size, quality, animated} = v.parse(ImageQuerySchema, ctx.req.query());
		const parts = id.split('.');
		if (parts.length !== 2 || !STICKER_EXTENSIONS.includes(parts[1])) {
			throw new HTTPException(400);
		}
		const [filename, ext] = parts;
		// FIX: interpolate the parsed filename — the templates previously contained a
		// mangled literal "$(unknown)" where `${filename}` belongs (the destructured
		// `filename` was otherwise unused, confirming the intent).
		const cacheKey = `stickers_${filename}_${ext}_${size}_${quality}_${animated}`;
		const s3Key = `stickers/${filename}`;
		return processStickerRequest({coalescer, ctx, cacheKey, s3Key, ext, size, quality, animated});
	};
};

View File

@@ -0,0 +1,75 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {PassThrough} from 'node:stream';
import type {Context} from 'hono';
import {Config} from '~/Config';
import {toBodyData, toWebReadableStream} from '~/lib/BinaryUtils';
import type {HonoEnv} from '~/lib/MediaTypes';
import {headS3Object, readS3Object} from '~/lib/S3Utils';
// Valid theme ids are exactly 16 lowercase hex characters (presumably a
// truncated hash — TODO confirm against the theme upload code).
const THEME_ID_PATTERN = /^[a-f0-9]{16}$/;
/**
 * HEAD handler for a stored theme stylesheet.
 *
 * Strips the ".css" suffix from the route param, validates the id, fetches
 * only the object's metadata from S3, and answers with headers and no body.
 * Invalid or missing ids yield a plain-text 404.
 */
export async function handleThemeHeadRequest(ctx: Context<HonoEnv>): Promise<Response> {
  const rawParam = ctx.req.param('id.css');
  const themeId = rawParam?.replace(/\.css$/, '');
  if (!themeId || !THEME_ID_PATTERN.test(themeId)) {
    return ctx.text('Not found', {status: 404});
  }
  const meta = await headS3Object(Config.AWS_S3_BUCKET_CDN, `themes/${themeId}.css`);
  // Collect all response headers, then apply them in one pass.
  const responseHeaders: Array<[string, string]> = [
    ['Content-Type', 'text/css; charset=utf-8'],
    ['Cache-Control', 'public, max-age=31536000, immutable'],
    ['Access-Control-Allow-Origin', '*'],
    ['Content-Length', String(meta.contentLength)],
  ];
  if (meta.lastModified) {
    responseHeaders.push(['Last-Modified', meta.lastModified.toUTCString()]);
  }
  for (const [name, value] of responseHeaders) {
    ctx.header(name, value);
  }
  return ctx.body(null);
}
/**
 * GET handler for a stored theme stylesheet.
 *
 * Validates the id the same way as the HEAD handler, then serves the object
 * from S3 — streamed when the helper returns a PassThrough, otherwise as a
 * buffered body with an explicit Content-Length.
 */
export async function handleThemeRequest(ctx: Context<HonoEnv>): Promise<Response> {
  const rawParam = ctx.req.param('id.css');
  const themeId = rawParam?.replace(/\.css$/, '');
  if (!themeId || !THEME_ID_PATTERN.test(themeId)) {
    return ctx.text('Not found', {status: 404});
  }
  const s3Result = await readS3Object(Config.AWS_S3_BUCKET_CDN, `themes/${themeId}.css`);
  ctx.header('Content-Type', 'text/css; charset=utf-8');
  ctx.header('Cache-Control', 'public, max-age=31536000, immutable');
  ctx.header('Access-Control-Allow-Origin', '*');
  if (s3Result.lastModified) {
    ctx.header('Last-Modified', new Date(s3Result.lastModified).toUTCString());
  }
  const payload = s3Result.data;
  // Streaming responses cannot carry a Content-Length up front.
  if (payload instanceof PassThrough) {
    return ctx.body(toWebReadableStream(payload));
  }
  ctx.header('Content-Length', String(payload.length));
  return ctx.body(toBodyData(payload));
}

View File

@@ -0,0 +1,79 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import assert from 'node:assert/strict';
import fs from 'node:fs/promises';
import type {Context} from 'hono';
import {HTTPException} from 'hono/http-exception';
import sharp from 'sharp';
import {temporaryFile} from 'tempy';
import * as v from 'valibot';
import {Config} from '~/Config';
import {Logger} from '~/Logger';
import {toBodyData} from '~/lib/BinaryUtils';
import {createThumbnail} from '~/lib/FFmpegUtils';
import type {HonoEnv} from '~/lib/MediaTypes';
import {getMediaCategory, getMimeType} from '~/lib/MimeTypeUtils';
import {readS3Object} from '~/lib/S3Utils';
// Request body for the thumbnail endpoint: only 'upload'-type sources are
// accepted, identified by the S3 key of the previously uploaded file.
const ThumbnailRequestSchema = v.object({
  type: v.literal('upload'),
  upload_filename: v.string(),
});
/**
 * POST handler that renders a JPEG thumbnail for a previously uploaded video.
 *
 * Flow: parse/validate the body, fetch the upload from S3, confirm it is a
 * video, write it to a temp file, extract a frame via ffmpeg, re-encode it
 * with sharp, and return it as image/jpeg. Temp files are registered on the
 * context for later cleanup.
 *
 * Fix: the catch-all previously converted the deliberate
 * `HTTPException(400, …)` responses thrown inside the try into a bare 404,
 * discarding their status and message. HTTPExceptions are now rethrown.
 *
 * @throws HTTPException 400 when the upload is not an identifiable video.
 * @throws HTTPException 404 on any other failure (missing object, ffmpeg
 *         error, …), logged with context.
 */
export const handleThumbnailRequest = async (ctx: Context<HonoEnv>): Promise<Response> => {
  try {
    const body = await ctx.req.json();
    const {upload_filename} = v.parse(ThumbnailRequestSchema, body);
    const {data} = await readS3Object(Config.AWS_S3_BUCKET_UPLOADS, upload_filename);
    // readS3Object may stream; thumbnailing requires the full buffer.
    assert(data instanceof Buffer);
    const mimeType = getMimeType(data, upload_filename);
    if (!mimeType) {
      throw new HTTPException(400, {message: 'Unable to determine file type'});
    }
    const mediaType = getMediaCategory(mimeType);
    if (mediaType !== 'video') {
      throw new HTTPException(400, {message: 'Not a video file'});
    }
    const ext = mimeType.split('/')[1] || 'mp4';
    const tempVideoPath = temporaryFile({extension: ext});
    ctx.get('tempFiles').push(tempVideoPath);
    await fs.writeFile(tempVideoPath, data);
    const thumbnailPath = await createThumbnail(tempVideoPath);
    ctx.get('tempFiles').push(thumbnailPath);
    const thumbnailData = await fs.readFile(thumbnailPath);
    const processedThumbnail = await sharp(thumbnailData).jpeg({quality: 80}).toBuffer();
    return ctx.body(toBodyData(processedThumbnail), {
      headers: {
        'Content-Type': 'image/jpeg',
      },
    });
  } catch (error) {
    // Preserve deliberate HTTP errors (the 400s above) instead of masking
    // them as "not found".
    if (error instanceof HTTPException) {
      throw error;
    }
    Logger.error({error}, 'Failed to generate thumbnail');
    throw new HTTPException(404);
  }
};