refactor progress

This commit is contained in:
Hampus Kraft
2026-02-17 12:22:36 +00:00
parent cb31608523
commit d5abd1a7e4
8257 changed files with 1190207 additions and 761040 deletions

90
packages/s3/src/App.tsx Normal file
View File

@@ -0,0 +1,90 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import type {MetricsCollector} from '@fluxer/hono_types/src/MetricsTypes';
import type {TracingOptions} from '@fluxer/hono_types/src/TracingTypes';
import type {LoggerInterface} from '@fluxer/logger/src/LoggerInterface';
import type {RateLimitService} from '@fluxer/rate_limit/src/RateLimitService';
import type {S3RateLimitConfig} from '@fluxer/s3/src/app/S3AppConfigTypes';
import {setupS3ErrorHandling} from '@fluxer/s3/src/app/S3ErrorHandling';
import {setupS3Middleware} from '@fluxer/s3/src/app/S3MiddlewareSetup';
import {resolveS3RateLimitService} from '@fluxer/s3/src/app/S3RateLimitResolver';
import {setupS3ResponseHeadersMiddleware} from '@fluxer/s3/src/app/S3ResponseHeadersMiddleware';
import {registerS3Routes} from '@fluxer/s3/src/app/S3RouteRegistrar';
import type {S3AuthConfig} from '@fluxer/s3/src/middleware/S3AuthMiddleware';
import type {S3ServiceConfig} from '@fluxer/s3/src/s3/S3Service';
import {S3Service} from '@fluxer/s3/src/s3/S3Service';
import type {HonoEnv} from '@fluxer/s3/src/types/HonoEnv';
import {Hono} from 'hono';
/** Options accepted by {@link createS3App}. */
export interface CreateS3AppOptions {
	/** Logger passed to the S3 service, middleware setup, and error handling. */
	logger: LoggerInterface;
	/** Configuration for the underlying {@link S3Service}. */
	s3Config: S3ServiceConfig;
	/** Credential configuration consumed by the S3 auth middleware. */
	authConfig: S3AuthConfig;
	/** Optional metrics sink wired into the middleware stack. */
	metricsCollector?: MetricsCollector;
	/** Optional tracing configuration wired into the middleware stack. */
	tracing?: TracingOptions;
	/** Optional KV store URL; forwarded to rate-limit service resolution. */
	kvUrl?: string;
	/** Pre-built rate-limit service, if the caller already has one. */
	rateLimitService?: RateLimitService | null;
	/** Rate-limit settings forwarded to both resolution and middleware. */
	rateLimitConfig?: S3RateLimitConfig | null;
}
/** Handle returned by {@link createS3App}: the app plus lifecycle hooks. */
export interface S3AppResult {
	/** The fully configured Hono application. */
	app: Hono<HonoEnv>;
	/** Accessor for the underlying S3 service instance. */
	getS3Service: () => S3Service;
	/** Asynchronous startup hook; must be awaited before serving requests. */
	initialize: () => Promise<void>;
	/** Synchronous teardown hook. */
	shutdown: () => void;
}
export function createS3App(options: CreateS3AppOptions): S3AppResult {
const {logger, s3Config, authConfig, metricsCollector, tracing, kvUrl, rateLimitService, rateLimitConfig} = options;
const s3Service = new S3Service(s3Config, logger);
const resolvedRateLimitService = resolveS3RateLimitService({
kvUrl,
rateLimitService,
rateLimitConfig,
});
const initialize = async (): Promise<void> => {
await s3Service.initialize();
};
const shutdown = (): void => {};
const app = new Hono<HonoEnv>();
setupS3Middleware({
app,
logger,
s3Service,
authConfig,
metricsCollector,
tracing,
rateLimitService: resolvedRateLimitService,
rateLimitConfig,
});
setupS3ResponseHeadersMiddleware(app);
registerS3Routes(app);
setupS3ErrorHandling({app, logger});
return {
app,
getS3Service: () => s3Service,
initialize,
shutdown,
};
}

View File

@@ -0,0 +1,137 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import * as fs from 'node:fs/promises';
import {createMockLogger} from '@fluxer/logger/src/mock';
import {createS3App} from '@fluxer/s3/src/App';
import {beforeEach, describe, expect, it, vi} from 'vitest';
// Per-process scratch directory so concurrent test runs do not collide.
const testRoot = `/tmp/fluxer-s3-test-app-${Date.now()}`;
const mockLogger = createMockLogger();
// Static credentials used by the authenticated-app tests below.
const authConfig = {
	accessKey: 'test-access-key',
	secretKey: 'test-secret-key',
};
beforeEach(async () => {
	vi.clearAllMocks();
	// Start every test from an empty storage root.
	await fs.rm(testRoot, {recursive: true, force: true});
});
describe('createS3App', () => {
describe('health endpoint', () => {
it('should respond to /_health and apply response headers', async () => {
const {app, initialize} = createS3App({
logger: mockLogger,
s3Config: {root: testRoot, buckets: []},
authConfig,
});
await initialize();
const res = await app.request('http://localhost/_health', {
headers: {
origin: 'http://example.com',
'x-amz-request-id': 'test-id',
},
});
expect(res.status).toBe(200);
expect(res.headers.get('x-amz-request-id')).toBe('test-id');
expect(res.headers.get('x-amz-id-2')).toBe('test-id');
expect(res.headers.get('Server')).toBe('FluxerS3');
expect(res.headers.get('Access-Control-Allow-Origin')).toBe('http://example.com');
});
it('should generate a request id when missing and omit CORS headers when no origin', async () => {
const {app, initialize} = createS3App({
logger: mockLogger,
s3Config: {root: testRoot, buckets: []},
authConfig,
});
await initialize();
const res = await app.request('http://localhost/_health');
expect(res.status).toBe(200);
expect(res.headers.get('x-amz-request-id')).toMatch(/^[0-9a-f-]{16,}$/i);
expect(res.headers.get('Access-Control-Allow-Origin')).toBeNull();
});
});
describe('authentication required', () => {
it('should reject unauthenticated requests to non-health endpoints', async () => {
const {app, initialize} = createS3App({
logger: mockLogger,
s3Config: {root: testRoot, buckets: []},
authConfig,
});
await initialize();
const res = await app.request('http://localhost/test-bucket/test-key');
expect(res.status).toBe(403);
const body = await res.text();
expect(body).toContain('<Code>AccessDenied</Code>');
});
it('should return S3-style error for unknown routes without auth', async () => {
const {app, initialize} = createS3App({
logger: mockLogger,
s3Config: {root: testRoot, buckets: []},
authConfig,
});
await initialize();
const res = await app.request('http://localhost/somewhere', {method: 'PATCH'});
expect(res.status).toBe(403);
const xml = await res.text();
expect(xml).toContain('<Code>AccessDenied</Code>');
});
it('should reject unauthenticated OPTIONS requests', async () => {
const {app, initialize} = createS3App({
logger: mockLogger,
s3Config: {root: testRoot, buckets: []},
authConfig,
});
await initialize();
const res = await app.request('http://localhost/anything', {method: 'OPTIONS'});
expect(res.status).toBe(403);
});
});
describe('no credentials configured', () => {
it('should reject all requests when credentials are not configured', async () => {
const {app, initialize} = createS3App({
logger: mockLogger,
s3Config: {root: testRoot, buckets: []},
authConfig: {},
});
const errorSpy = vi.spyOn(mockLogger, 'error');
await initialize();
const res = await app.request('http://localhost/test-bucket/test-key');
expect(res.status).toBe(403);
const body = await res.text();
expect(body).toContain('<Code>AccessDenied</Code>');
expect(errorSpy).toHaveBeenCalledWith(expect.stringContaining('S3 credentials not configured'));
});
});
});

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,107 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {hmacSha256, md5, md5Base64, randomHex, randomUUID, sha256} from '@fluxer/s3/src/utils/Crypto';
import {describe, expect, it} from 'vitest';
describe('Crypto', () => {
	// Known digests of the ASCII string "hello", used as fixed vectors below.
	const HELLO_SHA256 = '2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824';
	const HELLO_MD5_HEX = '5d41402abc4b2a76b9719d911017c592';
	const HELLO_MD5_B64 = 'XUFAKrxLKna5cZ2REBfFkg==';

	describe('hmacSha256', () => {
		it('should generate HMAC-SHA256 with string key', () => {
			const digest = hmacSha256('secret', 'message');
			expect(digest).toBeInstanceOf(Buffer);
			// SHA-256 output is 32 bytes, i.e. 64 hex characters.
			expect(digest.toString('hex')).toHaveLength(64);
		});

		it('should generate HMAC-SHA256 with buffer key', () => {
			expect(hmacSha256(Buffer.from('secret'), 'message')).toBeInstanceOf(Buffer);
		});

		it('should be deterministic', () => {
			const first = hmacSha256('key', 'data');
			const second = hmacSha256('key', 'data');
			expect(first.equals(second)).toBe(true);
		});
	});

	describe('sha256', () => {
		it('should generate SHA256 hash from string', () => {
			const hex = sha256('hello');
			expect(hex).toHaveLength(64);
			expect(hex).toBe(HELLO_SHA256);
		});

		it('should generate SHA256 hash from buffer', () => {
			expect(sha256(Buffer.from('hello'))).toBe(HELLO_SHA256);
		});
	});

	describe('md5', () => {
		it('should generate MD5 hash from string', () => {
			const hex = md5('hello');
			expect(hex).toHaveLength(32);
			expect(hex).toBe(HELLO_MD5_HEX);
		});

		it('should generate MD5 hash from buffer', () => {
			expect(md5(Buffer.from('hello'))).toBe(HELLO_MD5_HEX);
		});
	});

	describe('md5Base64', () => {
		it('should generate MD5 hash in base64 from string', () => {
			expect(md5Base64('hello')).toBe(HELLO_MD5_B64);
		});

		it('should generate MD5 hash in base64 from buffer', () => {
			expect(md5Base64(Buffer.from('hello'))).toBe(HELLO_MD5_B64);
		});
	});

	describe('randomHex', () => {
		it('should generate hex string of correct length', () => {
			const value = randomHex(16);
			// 16 random bytes encode to 32 lowercase hex characters.
			expect(value).toHaveLength(32);
			expect(value).toMatch(/^[0-9a-f]+$/);
		});

		it('should generate different values', () => {
			expect(randomHex(16)).not.toBe(randomHex(16));
		});
	});

	describe('randomUUID', () => {
		it('should generate valid UUID', () => {
			expect(randomUUID()).toMatch(/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/);
		});

		it('should generate unique UUIDs', () => {
			expect(randomUUID()).not.toBe(randomUUID());
		});
	});
});

View File

@@ -0,0 +1,221 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import type {PresignedUrlOptions} from '@fluxer/s3/src/s3/PresignedUrlGenerator';
import {generatePresignedUrl} from '@fluxer/s3/src/s3/PresignedUrlGenerator';
import {describe, expect, it} from 'vitest';
describe('generatePresignedUrl', () => {
const baseOptions: PresignedUrlOptions = {
method: 'GET',
bucket: 'test-bucket',
key: 'test-key.txt',
expiresIn: 300,
accessKey: 'test-access-key',
secretKey: 'test-secret-key',
endpoint: 'http://localhost:8080',
region: 'us-east-1',
};
describe('URL structure', () => {
it('should generate URL with correct endpoint and path', () => {
const url = generatePresignedUrl(baseOptions);
expect(url).toContain('http://localhost:8080/test-bucket/test-key.txt?');
});
it('should include all required AWS signature parameters', () => {
const url = new URL(generatePresignedUrl(baseOptions));
expect(url.searchParams.get('X-Amz-Algorithm')).toBe('AWS4-HMAC-SHA256');
expect(url.searchParams.get('X-Amz-Credential')).toBeTruthy();
expect(url.searchParams.get('X-Amz-Date')).toBeTruthy();
expect(url.searchParams.get('X-Amz-Expires')).toBe('300');
expect(url.searchParams.get('X-Amz-SignedHeaders')).toBe('host');
expect(url.searchParams.get('X-Amz-Signature')).toBeTruthy();
});
});
describe('credential format', () => {
it('should format credential with correct structure', () => {
const url = new URL(generatePresignedUrl(baseOptions));
const credential = url.searchParams.get('X-Amz-Credential');
expect(credential).toMatch(/^test-access-key\/\d{8}\/us-east-1\/s3\/aws4_request$/);
});
it('should use current date in credential', () => {
const url = new URL(generatePresignedUrl(baseOptions));
const credential = url.searchParams.get('X-Amz-Credential');
const dateStamp = credential?.split('/')[1];
const today = new Date().toISOString().slice(0, 10).replace(/-/g, '');
expect(dateStamp).toBe(today);
});
});
describe('signature', () => {
it('should generate same signature for same inputs', () => {
const url1 = generatePresignedUrl(baseOptions);
const url2 = generatePresignedUrl(baseOptions);
expect(url1).toBe(url2);
});
it('should generate different signatures for different access keys', () => {
const opts1 = {...baseOptions, accessKey: 'key1'};
const opts2 = {...baseOptions, accessKey: 'key2'};
const sig1 = new URL(generatePresignedUrl(opts1)).searchParams.get('X-Amz-Signature');
const sig2 = new URL(generatePresignedUrl(opts2)).searchParams.get('X-Amz-Signature');
expect(sig1).not.toBe(sig2);
});
it('should generate different signatures for different secret keys', () => {
const opts1 = {...baseOptions, secretKey: 'secret1'};
const opts2 = {...baseOptions, secretKey: 'secret2'};
const sig1 = new URL(generatePresignedUrl(opts1)).searchParams.get('X-Amz-Signature');
const sig2 = new URL(generatePresignedUrl(opts2)).searchParams.get('X-Amz-Signature');
expect(sig1).not.toBe(sig2);
});
it('should generate different signatures for different expiration times', () => {
const opts1 = {...baseOptions, expiresIn: 300};
const opts2 = {...baseOptions, expiresIn: 600};
const sig1 = new URL(generatePresignedUrl(opts1)).searchParams.get('X-Amz-Signature');
const sig2 = new URL(generatePresignedUrl(opts2)).searchParams.get('X-Amz-Signature');
expect(sig1).not.toBe(sig2);
});
});
describe('HTTP methods', () => {
it('should support GET method', () => {
const opts = {...baseOptions, method: 'GET' as const};
const url = generatePresignedUrl(opts);
expect(url).toBeTruthy();
expect(url).toContain('test-bucket/test-key.txt?');
});
it('should support PUT method', () => {
const opts = {...baseOptions, method: 'PUT' as const};
const url = generatePresignedUrl(opts);
expect(url).toBeTruthy();
const signature = new URL(url).searchParams.get('X-Amz-Signature');
expect(signature).toBeTruthy();
expect(signature?.length).toBeGreaterThan(0);
});
it('should support DELETE method', () => {
const opts = {...baseOptions, method: 'DELETE' as const};
const url = generatePresignedUrl(opts);
expect(url).toBeTruthy();
const signature = new URL(url).searchParams.get('X-Amz-Signature');
expect(signature).toBeTruthy();
});
it('should generate different signatures for different methods', () => {
const urlGet = generatePresignedUrl({...baseOptions, method: 'GET'});
const urlPut = generatePresignedUrl({...baseOptions, method: 'PUT'});
const urlDelete = generatePresignedUrl({...baseOptions, method: 'DELETE'});
const sigGet = new URL(urlGet).searchParams.get('X-Amz-Signature');
const sigPut = new URL(urlPut).searchParams.get('X-Amz-Signature');
const sigDelete = new URL(urlDelete).searchParams.get('X-Amz-Signature');
expect(sigGet).not.toBe(sigPut);
expect(sigGet).not.toBe(sigDelete);
expect(sigPut).not.toBe(sigDelete);
});
});
describe('regions', () => {
it('should default to us-east-1 region', () => {
const opts = {...baseOptions, region: undefined};
const url = new URL(generatePresignedUrl(opts));
const credential = url.searchParams.get('X-Amz-Credential');
expect(credential).toContain('/us-east-1/s3/aws4_request');
});
it('should use custom region when specified', () => {
const opts = {...baseOptions, region: 'eu-west-1'};
const url = new URL(generatePresignedUrl(opts));
const credential = url.searchParams.get('X-Amz-Credential');
expect(credential).toContain('/eu-west-1/s3/aws4_request');
});
});
describe('bucket and key handling', () => {
it('should handle keys with special characters', () => {
const opts = {...baseOptions, key: 'path/to/file with spaces.txt'};
const url = generatePresignedUrl(opts);
expect(url).toContain('path/to/file with spaces.txt?');
});
it('should handle keys with forward slashes', () => {
const opts = {...baseOptions, key: 'folder/subfolder/file.txt'};
const url = generatePresignedUrl(opts);
expect(url).toContain('/test-bucket/folder/subfolder/file.txt?');
});
it('should handle bucket names with periods', () => {
const opts = {...baseOptions, bucket: 'my.bucket.com'};
const url = generatePresignedUrl(opts);
expect(url).toContain('/my.bucket.com/');
});
});
describe('endpoint handling', () => {
it('should handle http endpoint', () => {
const opts = {...baseOptions, endpoint: 'http://example.com'};
const url = new URL(generatePresignedUrl(opts));
expect(url.protocol).toBe('http:');
expect(url.hostname).toBe('example.com');
});
it('should handle https endpoint', () => {
const opts = {...baseOptions, endpoint: 'https://example.com'};
const url = new URL(generatePresignedUrl(opts));
expect(url.protocol).toBe('https:');
expect(url.hostname).toBe('example.com');
});
it('should handle endpoint with port', () => {
const opts = {...baseOptions, endpoint: 'http://localhost:9000'};
const url = new URL(generatePresignedUrl(opts));
expect(url.hostname).toBe('localhost');
expect(url.port).toBe('9000');
});
});
});

View File

@@ -0,0 +1,226 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {createMockLogger} from '@fluxer/logger/src/mock';
import type {S3AuthConfig} from '@fluxer/s3/src/middleware/S3AuthMiddleware';
import {createS3AuthMiddleware} from '@fluxer/s3/src/middleware/S3AuthMiddleware';
import type {HonoEnv} from '@fluxer/s3/src/types/HonoEnv';
import {Hono} from 'hono';
import {describe, expect, it, vi} from 'vitest';
// Behavioral tests for createS3AuthMiddleware: credential configuration,
// Authorization-header (SigV4) auth, presigned-URL auth, and the /_health bypass.
describe('S3AuthMiddleware', () => {
	describe('missing credentials', () => {
		it('should reject requests when no credentials are configured', async () => {
			const mockLogger = createMockLogger();
			const errorSpy = vi.spyOn(mockLogger, 'error');
			// Empty config: neither accessKey nor secretKey is present.
			const middleware = createS3AuthMiddleware({}, mockLogger);
			const app = new Hono<HonoEnv>();
			app.use('*', middleware);
			app.get('/', () => new Response('OK'));
			const res = await app.request('http://localhost/');
			expect(res.status).toBe(403);
			expect(errorSpy).toHaveBeenCalledWith(expect.stringContaining('S3 credentials not configured'));
			const body = await res.text();
			// Rejection is rendered as an S3-style XML error document.
			expect(body).toContain('<Code>AccessDenied</Code>');
		});
		it('should reject requests when only access key is configured', async () => {
			const mockLogger = createMockLogger();
			const errorSpy = vi.spyOn(mockLogger, 'error');
			const config: S3AuthConfig = {
				accessKey: 'test-key',
				secretKey: undefined,
			};
			const middleware = createS3AuthMiddleware(config, mockLogger);
			const app = new Hono<HonoEnv>();
			app.use('*', middleware);
			app.get('/', () => new Response('OK'));
			const res = await app.request('http://localhost/');
			expect(res.status).toBe(403);
			expect(errorSpy).toHaveBeenCalledWith(expect.stringContaining('S3 credentials not configured'));
		});
		it('should reject requests when only secret key is configured', async () => {
			const mockLogger = createMockLogger();
			const errorSpy = vi.spyOn(mockLogger, 'error');
			const config: S3AuthConfig = {
				accessKey: undefined,
				secretKey: 'test-secret',
			};
			const middleware = createS3AuthMiddleware(config, mockLogger);
			const app = new Hono<HonoEnv>();
			app.use('*', middleware);
			app.get('/', () => new Response('OK'));
			const res = await app.request('http://localhost/');
			expect(res.status).toBe(403);
			expect(errorSpy).toHaveBeenCalledWith(expect.stringContaining('S3 credentials not configured'));
		});
	});
	describe('authorization header authentication', () => {
		// Shared across this group; no expectations are made against the logger here.
		const mockLogger = createMockLogger();
		const config: S3AuthConfig = {
			accessKey: 'test-access-key',
			secretKey: 'test-secret-key',
		};
		it('should reject requests without auth header', async () => {
			const middleware = createS3AuthMiddleware(config, mockLogger);
			const app = new Hono<HonoEnv>();
			app.use('*', middleware);
			app.get('/', () => new Response('OK'));
			const res = await app.request('http://localhost/');
			expect(res.status).toBe(403);
			const body = await res.text();
			expect(body).toContain('<Code>AccessDenied</Code>');
			expect(body).toContain('No valid authentication provided');
		});
		it('should reject requests with invalid authorization header format', async () => {
			const middleware = createS3AuthMiddleware(config, mockLogger);
			const app = new Hono<HonoEnv>();
			app.use('*', middleware);
			app.get('/', () => new Response('OK'));
			const res = await app.request('http://localhost/', {
				headers: {
					// Not a parseable AWS4-HMAC-SHA256 authorization header.
					authorization: 'InvalidFormat',
				},
			});
			expect(res.status).toBe(403);
			const body = await res.text();
			expect(body).toContain('<Code>AccessDenied</Code>');
		});
		it('should reject requests with wrong access key', async () => {
			const middleware = createS3AuthMiddleware(config, mockLogger);
			const app = new Hono<HonoEnv>();
			app.use('*', middleware);
			app.get('/', () => new Response('OK'));
			// Well-formed SigV4 header whose Credential access key does not match config.
			const authHeader = `AWS4-HMAC-SHA256 Credential=wrong-key/20240101/us-east-1/s3/aws4_request, SignedHeaders=host, Signature=abc123`;
			const res = await app.request('http://localhost/', {
				headers: {
					authorization: authHeader,
					'x-amz-date': '20240101T000000Z',
				},
			});
			expect(res.status).toBe(403);
			const body = await res.text();
			// Key mismatch is reported before any signature verification.
			expect(body).toContain('<Code>InvalidAccessKeyId</Code>');
		});
	});
	describe('presigned URL authentication', () => {
		const mockLogger = createMockLogger();
		const config: S3AuthConfig = {
			accessKey: 'test-access-key',
			secretKey: 'test-secret-key',
		};
		it('should reject presigned URL with wrong access key', async () => {
			const middleware = createS3AuthMiddleware(config, mockLogger);
			const app = new Hono<HonoEnv>();
			app.use('*', middleware);
			app.get('/', () => new Response('OK'));
			// Complete presigned-URL query parameter set, but with a foreign access key.
			const url = new URL('http://localhost/');
			url.searchParams.set('X-Amz-Algorithm', 'AWS4-HMAC-SHA256');
			url.searchParams.set('X-Amz-Credential', 'wrong-key/20240101/us-east-1/s3/aws4_request');
			url.searchParams.set('X-Amz-Date', '20240101T000000Z');
			url.searchParams.set('X-Amz-Expires', '300');
			url.searchParams.set('X-Amz-SignedHeaders', 'host');
			url.searchParams.set('X-Amz-Signature', 'abc123');
			const res = await app.request(url.toString());
			expect(res.status).toBe(403);
			const body = await res.text();
			expect(body).toContain('<Code>InvalidAccessKeyId</Code>');
		});
		it('should reject presigned URL with missing parameters', async () => {
			const middleware = createS3AuthMiddleware(config, mockLogger);
			const app = new Hono<HonoEnv>();
			app.use('*', middleware);
			app.get('/', () => new Response('OK'));
			// Only the algorithm is provided; Credential/Date/Expires/Signature are absent.
			const url = new URL('http://localhost/');
			url.searchParams.set('X-Amz-Algorithm', 'AWS4-HMAC-SHA256');
			const res = await app.request(url.toString());
			expect(res.status).toBe(400);
			const body = await res.text();
			expect(body).toContain('<Code>InvalidArgument</Code>');
		});
		it('should reject expired presigned URL', async () => {
			const middleware = createS3AuthMiddleware(config, mockLogger);
			const app = new Hono<HonoEnv>();
			app.use('*', middleware);
			app.get('/', () => new Response('OK'));
			// Signed one hour ago with a 1-second expiry, so it is long expired.
			const pastDate = new Date(Date.now() - 3600000);
			// Convert ISO-8601 to the compact amz-date form (YYYYMMDDTHHMMSSZ).
			const amzDate = pastDate.toISOString().replace(/[:-]|\.\d+/g, '');
			const url = new URL('http://localhost/');
			url.searchParams.set('X-Amz-Algorithm', 'AWS4-HMAC-SHA256');
			url.searchParams.set('X-Amz-Credential', `${config.accessKey}/20240101/us-east-1/s3/aws4_request`);
			url.searchParams.set('X-Amz-Date', amzDate);
			url.searchParams.set('X-Amz-Expires', '1');
			url.searchParams.set('X-Amz-SignedHeaders', 'host');
			url.searchParams.set('X-Amz-Signature', 'abc123');
			const res = await app.request(url.toString());
			expect(res.status).toBe(403);
			const body = await res.text();
			expect(body).toContain('<Code>AccessDenied</Code>');
			expect(body).toContain('expired');
		});
	});
	describe('health endpoint bypass', () => {
		it('should allow health endpoint without authentication', async () => {
			const mockLogger = createMockLogger();
			const errorSpy = vi.spyOn(mockLogger, 'error');
			// Even with no credentials configured, /_health must pass through.
			const middleware = createS3AuthMiddleware({}, mockLogger);
			const app = new Hono<HonoEnv>();
			app.use('*', middleware);
			app.get('/_health', () => new Response('OK'));
			const res = await app.request('http://localhost/_health');
			expect(res.status).toBe(200);
			expect(errorSpy).not.toHaveBeenCalled();
		});
	});
});

View File

@@ -0,0 +1,231 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {S3Error, S3Errors} from '@fluxer/s3/src/errors/S3Error';
import {describe, expect, it} from 'vitest';
describe('S3Error', () => {
	describe('constructor', () => {
		it('should create error with correct properties', () => {
			const err = new S3Error('NoSuchBucket', 'The bucket does not exist', {
				resource: 'my-bucket',
				requestId: 'req-123',
			});
			expect(err.code).toBe('NoSuchBucket');
			expect(err.message).toBe('The bucket does not exist');
			expect(err.resource).toBe('my-bucket');
			expect(err.requestId).toBe('req-123');
			expect(err.status).toBe(404);
		});

		it('should map error codes to correct HTTP status', () => {
			// Expected HTTP status for each error code, checked in one pass.
			const statusByCode = [
				['AccessDenied', 403],
				['NoSuchBucket', 404],
				['NoSuchKey', 404],
				['BucketAlreadyExists', 409],
				['BucketNotEmpty', 409],
				['InternalError', 500],
				['InvalidArgument', 400],
				['InvalidRange', 416],
				['MissingContentLength', 411],
				['PreconditionFailed', 412],
			] as const;
			for (const [code, status] of statusByCode) {
				expect(new S3Error(code, 'test').status).toBe(status);
			}
		});
	});

	describe('toXml', () => {
		it('should generate valid XML error response', () => {
			const err = new S3Error('NoSuchBucket', 'The bucket does not exist', {
				resource: 'my-bucket',
				requestId: 'req-123',
			});
			const rendered = err.toXml();
			expect(rendered).toContain('<?xml version="1.0" encoding="UTF-8"?>');
			expect(rendered).toContain('<Error>');
			expect(rendered).toContain('<Code>NoSuchBucket</Code>');
			expect(rendered).toContain('<Message>The bucket does not exist</Message>');
			expect(rendered).toContain('<Resource>my-bucket</Resource>');
			expect(rendered).toContain('<RequestId>req-123</RequestId>');
			expect(rendered).toContain('</Error>');
		});

		it('should escape special characters in XML', () => {
			const err = new S3Error('InvalidArgument', 'Value <test> is invalid', {
				resource: 'key&name',
			});
			const rendered = err.toXml();
			// Angle brackets and ampersands must be entity-encoded.
			expect(rendered).toContain('<Message>Value &lt;test&gt; is invalid</Message>');
			expect(rendered).toContain('<Resource>key&amp;name</Resource>');
		});

		it('should omit optional fields if not provided', () => {
			const rendered = new S3Error('InternalError', 'Something went wrong').toXml();
			expect(rendered).not.toContain('<Resource>');
			expect(rendered).not.toContain('<RequestId>');
		});
	});

	describe('getResponse', () => {
		it('should return Response with correct status and headers', () => {
			const err = new S3Error('NoSuchBucket', 'The bucket does not exist', {
				requestId: 'req-123',
			});
			const response = err.getResponse();
			expect(response.status).toBe(404);
			expect(response.headers.get('Content-Type')).toBe('application/xml');
			expect(response.headers.get('x-amz-request-id')).toBe('req-123');
		});

		it('should default requestId to unknown if not set', () => {
			const response = new S3Error('InternalError', 'test').getResponse();
			expect(response.headers.get('x-amz-request-id')).toBe('unknown');
		});
	});
});
describe('S3Errors factory functions', () => {
it('accessDenied should create AccessDenied error', () => {
const error = S3Errors.accessDenied('Custom message', 'resource');
expect(error.code).toBe('AccessDenied');
expect(error.message).toBe('Custom message');
expect(error.resource).toBe('resource');
expect(error.status).toBe(403);
});
// Authentication and signature failures map to 403 responses.
it('accessDenied should use default message', () => {
const error = S3Errors.accessDenied();
expect(error.message).toBe('Access Denied');
});
it('signatureDoesNotMatch should create SignatureDoesNotMatch error', () => {
const error = S3Errors.signatureDoesNotMatch();
expect(error.code).toBe('SignatureDoesNotMatch');
expect(error.status).toBe(403);
});
// Missing-resource lookups map to 404 and carry the resource identifier.
it('noSuchBucket should create NoSuchBucket error', () => {
const error = S3Errors.noSuchBucket('missing-bucket');
expect(error.code).toBe('NoSuchBucket');
expect(error.resource).toBe('missing-bucket');
expect(error.status).toBe(404);
});
it('noSuchKey should create NoSuchKey error', () => {
const error = S3Errors.noSuchKey('missing-key');
expect(error.code).toBe('NoSuchKey');
expect(error.resource).toBe('missing-key');
expect(error.status).toBe(404);
});
it('noSuchUpload should create NoSuchUpload error', () => {
const error = S3Errors.noSuchUpload('upload-id');
expect(error.code).toBe('NoSuchUpload');
expect(error.resource).toBe('upload-id');
});
// Bucket state conflicts map to 409.
it('bucketAlreadyOwnedByYou should create BucketAlreadyOwnedByYou error', () => {
const error = S3Errors.bucketAlreadyOwnedByYou('my-bucket');
expect(error.code).toBe('BucketAlreadyOwnedByYou');
expect(error.status).toBe(409);
});
it('bucketNotEmpty should create BucketNotEmpty error', () => {
const error = S3Errors.bucketNotEmpty('my-bucket');
expect(error.code).toBe('BucketNotEmpty');
expect(error.status).toBe(409);
});
it('invalidAccessKeyId should create InvalidAccessKeyId error', () => {
const error = S3Errors.invalidAccessKeyId();
expect(error.code).toBe('InvalidAccessKeyId');
expect(error.status).toBe(403);
});
// Client-input validation errors map to 4xx statuses.
it('invalidArgument should create InvalidArgument error', () => {
const error = S3Errors.invalidArgument('Bad value');
expect(error.code).toBe('InvalidArgument');
expect(error.message).toBe('Bad value');
expect(error.status).toBe(400);
});
it('invalidBucketName should create InvalidBucketName error', () => {
const error = S3Errors.invalidBucketName('bad_bucket');
expect(error.code).toBe('InvalidBucketName');
expect(error.status).toBe(400);
});
it('invalidRange should create InvalidRange error', () => {
const error = S3Errors.invalidRange();
expect(error.code).toBe('InvalidRange');
expect(error.status).toBe(416);
});
// Multipart-upload specific error codes.
it('invalidPart should create InvalidPart error', () => {
const error = S3Errors.invalidPart();
expect(error.code).toBe('InvalidPart');
});
it('invalidPartOrder should create InvalidPartOrder error', () => {
const error = S3Errors.invalidPartOrder();
expect(error.code).toBe('InvalidPartOrder');
});
it('missingContentLength should create MissingContentLength error', () => {
const error = S3Errors.missingContentLength();
expect(error.code).toBe('MissingContentLength');
expect(error.status).toBe(411);
});
it('notImplemented should create NotImplemented error', () => {
const error = S3Errors.notImplemented();
expect(error.code).toBe('NotImplemented');
});
it('preconditionFailed should create PreconditionFailed error', () => {
const error = S3Errors.preconditionFailed('If-Match');
expect(error.code).toBe('PreconditionFailed');
expect(error.status).toBe(412);
});
it('requestTimeTooSkewed should create RequestTimeTooSkewed error', () => {
const error = S3Errors.requestTimeTooSkewed();
expect(error.code).toBe('RequestTimeTooSkewed');
});
// Payload size constraints.
it('entityTooLarge should create EntityTooLarge error', () => {
const error = S3Errors.entityTooLarge(1024);
expect(error.code).toBe('EntityTooLarge');
});
it('entityTooSmall should create EntityTooSmall error', () => {
const error = S3Errors.entityTooSmall();
expect(error.code).toBe('EntityTooSmall');
});
});

View File

@@ -0,0 +1,484 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import * as fs from 'node:fs/promises';
import {Readable} from 'node:stream';
import {createMockLogger} from '@fluxer/logger/src/mock';
import {S3Error} from '@fluxer/s3/src/errors/S3Error';
import {S3Service} from '@fluxer/s3/src/s3/S3Service';
import {afterEach, beforeEach, describe, expect, it} from 'vitest';
// Timestamped scratch directory so concurrent test runs do not collide.
const testRoot = `/tmp/fluxer-s3-service-test-${Date.now()}`;
const mockLogger = createMockLogger();
/**
 * Drains a Node readable stream and decodes the collected bytes as UTF-8.
 */
async function streamToString(stream: Readable): Promise<string> {
  const chunks: Array<Buffer> = [];
  for await (const chunk of stream) {
    chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
  }
  return Buffer.concat(chunks).toString('utf8');
}
// Wipe the scratch directory around every test so each test starts from an
// empty root; force avoids failures when the directory does not exist yet.
beforeEach(async () => {
await fs.rm(testRoot, {recursive: true, force: true});
});
afterEach(async () => {
await fs.rm(testRoot, {recursive: true, force: true});
});
describe('S3Service', () => {
// Covers root/bucket creation on startup and idempotent re-initialization.
describe('initialize', () => {
it('should create root directory and specified buckets', async () => {
const service = new S3Service({root: testRoot, buckets: ['bucket1', 'bucket2']}, mockLogger);
await service.initialize();
const buckets = await service.listBuckets();
expect(buckets.map((b) => b.name).sort()).toEqual(['bucket1', 'bucket2']);
});
// Calling initialize twice must not fail or duplicate buckets.
it('should handle already existing buckets during initialization', async () => {
const service = new S3Service({root: testRoot, buckets: ['existing-bucket']}, mockLogger);
await service.initialize();
await service.initialize();
const buckets = await service.listBuckets();
expect(buckets).toHaveLength(1);
});
});
// Bucket CRUD: create/list/delete/exists/head, plus error cases
// (duplicate create, missing bucket, non-empty delete).
describe('bucket operations', () => {
it('should create and list buckets', async () => {
const service = new S3Service({root: testRoot, buckets: []}, mockLogger);
await service.initialize();
await service.createBucket('test-bucket');
const buckets = await service.listBuckets();
expect(buckets).toHaveLength(1);
expect(buckets[0].name).toBe('test-bucket');
});
it('should throw error when creating duplicate bucket', async () => {
const service = new S3Service({root: testRoot, buckets: []}, mockLogger);
await service.initialize();
await service.createBucket('test-bucket');
await expect(service.createBucket('test-bucket')).rejects.toThrow(S3Error);
});
it('should delete empty bucket', async () => {
const service = new S3Service({root: testRoot, buckets: ['delete-me']}, mockLogger);
await service.initialize();
await service.deleteBucket('delete-me');
const buckets = await service.listBuckets();
expect(buckets).toHaveLength(0);
});
it('should throw error when deleting non-existent bucket', async () => {
const service = new S3Service({root: testRoot, buckets: []}, mockLogger);
await service.initialize();
await expect(service.deleteBucket('nonexistent')).rejects.toThrow(S3Error);
});
// Deleting a bucket that still holds objects must be rejected.
it('should throw error when deleting non-empty bucket', async () => {
const service = new S3Service({root: testRoot, buckets: ['non-empty']}, mockLogger);
await service.initialize();
await service.putObject('non-empty', 'file.txt', Buffer.from('content'), {});
await expect(service.deleteBucket('non-empty')).rejects.toThrow(S3Error);
});
it('should check bucket existence', async () => {
const service = new S3Service({root: testRoot, buckets: ['exists']}, mockLogger);
await service.initialize();
expect(await service.bucketExists('exists')).toBe(true);
expect(await service.bucketExists('not-exists')).toBe(false);
});
// headBucket resolves with no value on success and throws on a miss.
it('should head bucket successfully', async () => {
const service = new S3Service({root: testRoot, buckets: ['test-bucket']}, mockLogger);
await service.initialize();
await expect(service.headBucket('test-bucket')).resolves.toBeUndefined();
});
it('should throw error when heading non-existent bucket', async () => {
const service = new S3Service({root: testRoot, buckets: []}, mockLogger);
await service.initialize();
await expect(service.headBucket('nonexistent')).rejects.toThrow(S3Error);
});
});
// Object CRUD: put/get (whole and ranged), delete (single and batch),
// head, and user metadata round-tripping.
describe('object operations', () => {
it('should put and get object', async () => {
const service = new S3Service({root: testRoot, buckets: ['test-bucket']}, mockLogger);
await service.initialize();
const content = Buffer.from('Hello, World!');
await service.putObject('test-bucket', 'hello.txt', content, {
contentType: 'text/plain',
});
const result = await service.getObject('test-bucket', 'hello.txt');
const data = await streamToString(result.stream);
expect(data).toBe('Hello, World!');
expect(result.metadata.contentType).toBe('text/plain');
expect(result.metadata.size).toBe(content.length);
});
it('should throw error when getting non-existent object', async () => {
const service = new S3Service({root: testRoot, buckets: ['test-bucket']}, mockLogger);
await service.initialize();
await expect(service.getObject('test-bucket', 'nonexistent.txt')).rejects.toThrow(S3Error);
});
it('should throw error when getting object from non-existent bucket', async () => {
const service = new S3Service({root: testRoot, buckets: []}, mockLogger);
await service.initialize();
await expect(service.getObject('nonexistent', 'file.txt')).rejects.toThrow(S3Error);
});
// Range reads are inclusive (0-4 returns 5 bytes) and report Content-Range.
it('should get object with range', async () => {
const service = new S3Service({root: testRoot, buckets: ['test-bucket']}, mockLogger);
await service.initialize();
const content = Buffer.from('Hello, World!');
await service.putObject('test-bucket', 'range.txt', content, {});
const result = await service.getObject('test-bucket', 'range.txt', {
range: {start: 0, end: 4},
});
const data = await streamToString(result.stream);
expect(data).toBe('Hello');
expect(result.contentRange).toBe('bytes 0-4/13');
});
it('should throw error for invalid range', async () => {
const service = new S3Service({root: testRoot, buckets: ['test-bucket']}, mockLogger);
await service.initialize();
const content = Buffer.from('Short');
await service.putObject('test-bucket', 'short.txt', content, {});
await expect(
service.getObject('test-bucket', 'short.txt', {
range: {start: 100, end: 200},
}),
).rejects.toThrow(S3Error);
});
it('should delete object', async () => {
const service = new S3Service({root: testRoot, buckets: ['test-bucket']}, mockLogger);
await service.initialize();
await service.putObject('test-bucket', 'to-delete.txt', Buffer.from('delete me'), {});
await service.deleteObject('test-bucket', 'to-delete.txt');
await expect(service.getObject('test-bucket', 'to-delete.txt')).rejects.toThrow(S3Error);
});
// Batch delete reports a nonexistent key as deleted rather than an error,
// as the assertions below pin down.
it('should delete multiple objects', async () => {
const service = new S3Service({root: testRoot, buckets: ['test-bucket']}, mockLogger);
await service.initialize();
await service.putObject('test-bucket', 'file1.txt', Buffer.from('1'), {});
await service.putObject('test-bucket', 'file2.txt', Buffer.from('2'), {});
await service.putObject('test-bucket', 'file3.txt', Buffer.from('3'), {});
const result = await service.deleteObjects('test-bucket', ['file1.txt', 'file2.txt', 'nonexistent.txt']);
expect(result.deleted).toContain('file1.txt');
expect(result.deleted).toContain('file2.txt');
expect(result.deleted).toContain('nonexistent.txt');
expect(result.errors).toHaveLength(0);
});
it('should head object', async () => {
const service = new S3Service({root: testRoot, buckets: ['test-bucket']}, mockLogger);
await service.initialize();
const content = Buffer.from('Hello!');
await service.putObject('test-bucket', 'head-me.txt', content, {
contentType: 'text/plain',
});
const metadata = await service.headObject('test-bucket', 'head-me.txt');
expect(metadata.size).toBe(content.length);
expect(metadata.contentType).toBe('text/plain');
expect(metadata.key).toBe('head-me.txt');
});
it('should throw error when heading non-existent object', async () => {
const service = new S3Service({root: testRoot, buckets: ['test-bucket']}, mockLogger);
await service.initialize();
await expect(service.headObject('test-bucket', 'nonexistent.txt')).rejects.toThrow(S3Error);
});
// x-amz-meta-* user metadata must survive a put/head round trip.
it('should put object with user metadata', async () => {
const service = new S3Service({root: testRoot, buckets: ['test-bucket']}, mockLogger);
await service.initialize();
await service.putObject('test-bucket', 'with-meta.txt', Buffer.from('data'), {
metadata: {
'x-amz-meta-author': 'test-user',
'x-amz-meta-version': '1.0',
},
});
const metadata = await service.headObject('test-bucket', 'with-meta.txt');
expect(metadata.metadata['x-amz-meta-author']).toBe('test-user');
expect(metadata.metadata['x-amz-meta-version']).toBe('1.0');
});
});
// Listing semantics: full listing, prefix filtering, delimiter grouping
// into common prefixes, maxKeys pagination, and marker continuation.
describe('list objects', () => {
it('should list objects in bucket', async () => {
const service = new S3Service({root: testRoot, buckets: ['test-bucket']}, mockLogger);
await service.initialize();
await service.putObject('test-bucket', 'file1.txt', Buffer.from('1'), {});
await service.putObject('test-bucket', 'file2.txt', Buffer.from('2'), {});
await service.putObject('test-bucket', 'folder/file3.txt', Buffer.from('3'), {});
const result = await service.listObjects('test-bucket', {});
expect(result.contents).toHaveLength(3);
expect(result.keyCount).toBe(3);
});
it('should list objects with prefix', async () => {
const service = new S3Service({root: testRoot, buckets: ['test-bucket']}, mockLogger);
await service.initialize();
await service.putObject('test-bucket', 'prefix-a/file1.txt', Buffer.from('1'), {});
await service.putObject('test-bucket', 'prefix-a/file2.txt', Buffer.from('2'), {});
await service.putObject('test-bucket', 'prefix-b/file3.txt', Buffer.from('3'), {});
const result = await service.listObjects('test-bucket', {prefix: 'prefix-a/'});
expect(result.contents).toHaveLength(2);
expect(result.contents.every((o) => o.key.startsWith('prefix-a/'))).toBe(true);
});
// With a delimiter, nested keys collapse into commonPrefixes while
// top-level keys remain in contents.
it('should list objects with delimiter', async () => {
const service = new S3Service({root: testRoot, buckets: ['test-bucket']}, mockLogger);
await service.initialize();
await service.putObject('test-bucket', 'folder1/file1.txt', Buffer.from('1'), {});
await service.putObject('test-bucket', 'folder2/file2.txt', Buffer.from('2'), {});
await service.putObject('test-bucket', 'root.txt', Buffer.from('3'), {});
const result = await service.listObjects('test-bucket', {delimiter: '/'});
expect(result.contents.some((o) => o.key === 'root.txt')).toBe(true);
expect(result.commonPrefixes).toContain('folder1/');
expect(result.commonPrefixes).toContain('folder2/');
});
it('should paginate with maxKeys', async () => {
const service = new S3Service({root: testRoot, buckets: ['test-bucket']}, mockLogger);
await service.initialize();
for (let i = 0; i < 5; i++) {
await service.putObject('test-bucket', `file${i}.txt`, Buffer.from(`${i}`), {});
}
const result = await service.listObjects('test-bucket', {maxKeys: 2});
expect(result.contents).toHaveLength(2);
expect(result.isTruncated).toBe(true);
});
// The marker itself is excluded; listing resumes strictly after it.
it('should continue from marker', async () => {
const service = new S3Service({root: testRoot, buckets: ['test-bucket']}, mockLogger);
await service.initialize();
await service.putObject('test-bucket', 'a.txt', Buffer.from('a'), {});
await service.putObject('test-bucket', 'b.txt', Buffer.from('b'), {});
await service.putObject('test-bucket', 'c.txt', Buffer.from('c'), {});
const result = await service.listObjects('test-bucket', {marker: 'a.txt'});
expect(result.contents.some((o) => o.key === 'a.txt')).toBe(false);
expect(result.contents.some((o) => o.key === 'b.txt')).toBe(true);
});
});
// Copy semantics: same-bucket copies, cross-bucket copies, and the
// missing-source error case.
describe('copy object', () => {
it('should copy object within same bucket', async () => {
const service = new S3Service({root: testRoot, buckets: ['test-bucket']}, mockLogger);
await service.initialize();
const content = Buffer.from('Original content');
await service.putObject('test-bucket', 'source.txt', content, {
contentType: 'text/plain',
});
await service.copyObject('test-bucket', 'source.txt', 'test-bucket', 'destination.txt');
const result = await service.getObject('test-bucket', 'destination.txt');
const data = await streamToString(result.stream);
expect(data).toBe('Original content');
});
it('should copy object between buckets', async () => {
const service = new S3Service({root: testRoot, buckets: ['source-bucket', 'dest-bucket']}, mockLogger);
await service.initialize();
await service.putObject('source-bucket', 'file.txt', Buffer.from('cross-bucket'), {});
await service.copyObject('source-bucket', 'file.txt', 'dest-bucket', 'copied.txt');
const result = await service.getObject('dest-bucket', 'copied.txt');
const data = await streamToString(result.stream);
expect(data).toBe('cross-bucket');
});
it('should throw error when copying non-existent source', async () => {
const service = new S3Service({root: testRoot, buckets: ['test-bucket']}, mockLogger);
await service.initialize();
await expect(
service.copyObject('test-bucket', 'nonexistent.txt', 'test-bucket', 'destination.txt'),
).rejects.toThrow(S3Error);
});
});
// Multipart lifecycle: create, upload parts, complete (parts concatenated
// in part-number order), abort, and the two listing endpoints.
describe('multipart upload', () => {
it('should create multipart upload', async () => {
const service = new S3Service({root: testRoot, buckets: ['test-bucket']}, mockLogger);
await service.initialize();
const result = await service.createMultipartUpload('test-bucket', 'large-file.bin', {
contentType: 'application/octet-stream',
});
expect(result.uploadId).toBeTruthy();
});
it('should upload part', async () => {
const service = new S3Service({root: testRoot, buckets: ['test-bucket']}, mockLogger);
await service.initialize();
const upload = await service.createMultipartUpload('test-bucket', 'large-file.bin', {});
const partData = Buffer.from('part data content');
const part = await service.uploadPart('test-bucket', 'large-file.bin', upload.uploadId, 1, partData);
expect(part.etag).toBeTruthy();
});
// Completing stitches the parts together into the final object body.
it('should complete multipart upload', async () => {
const service = new S3Service({root: testRoot, buckets: ['test-bucket']}, mockLogger);
await service.initialize();
const upload = await service.createMultipartUpload('test-bucket', 'multi.bin', {});
const part1 = await service.uploadPart('test-bucket', 'multi.bin', upload.uploadId, 1, Buffer.from('part1'));
const part2 = await service.uploadPart('test-bucket', 'multi.bin', upload.uploadId, 2, Buffer.from('part2'));
const result = await service.completeMultipartUpload('test-bucket', 'multi.bin', upload.uploadId, [
{partNumber: 1, etag: part1.etag},
{partNumber: 2, etag: part2.etag},
]);
expect(result.etag).toBeTruthy();
expect(result.location).toContain('multi.bin');
const obj = await service.getObject('test-bucket', 'multi.bin');
const data = await streamToString(obj.stream);
expect(data).toBe('part1part2');
});
// Aborting discards uploaded parts; no object is created.
it('should abort multipart upload', async () => {
const service = new S3Service({root: testRoot, buckets: ['test-bucket']}, mockLogger);
await service.initialize();
const upload = await service.createMultipartUpload('test-bucket', 'abort-me.bin', {});
await service.uploadPart('test-bucket', 'abort-me.bin', upload.uploadId, 1, Buffer.from('data'));
await service.abortMultipartUpload('test-bucket', 'abort-me.bin', upload.uploadId);
await expect(service.getObject('test-bucket', 'abort-me.bin')).rejects.toThrow(S3Error);
});
it('should list parts', async () => {
const service = new S3Service({root: testRoot, buckets: ['test-bucket']}, mockLogger);
await service.initialize();
const upload = await service.createMultipartUpload('test-bucket', 'parts.bin', {});
await service.uploadPart('test-bucket', 'parts.bin', upload.uploadId, 1, Buffer.from('part1'));
await service.uploadPart('test-bucket', 'parts.bin', upload.uploadId, 2, Buffer.from('part2'));
const result = await service.listParts('test-bucket', 'parts.bin', upload.uploadId, {});
expect(result.parts).toHaveLength(2);
expect(result.parts.some((p) => p.partNumber === 1)).toBe(true);
expect(result.parts.some((p) => p.partNumber === 2)).toBe(true);
});
it('should list multipart uploads', async () => {
const service = new S3Service({root: testRoot, buckets: ['test-bucket']}, mockLogger);
await service.initialize();
await service.createMultipartUpload('test-bucket', 'upload1.bin', {});
await service.createMultipartUpload('test-bucket', 'upload2.bin', {});
const result = await service.listMultipartUploads('test-bucket', {});
expect(result.uploads).toHaveLength(2);
});
});
// Bucket name rules enforced by createBucket: 3-63 characters, and no
// uppercase letters or underscores.
describe('bucket name validation', () => {
it('should reject bucket names shorter than 3 characters', async () => {
const service = new S3Service({root: testRoot, buckets: []}, mockLogger);
await service.initialize();
await expect(service.createBucket('ab')).rejects.toThrow(S3Error);
});
it('should reject bucket names longer than 63 characters', async () => {
const service = new S3Service({root: testRoot, buckets: []}, mockLogger);
await service.initialize();
const longName = 'a'.repeat(64);
await expect(service.createBucket(longName)).rejects.toThrow(S3Error);
});
it('should reject bucket names with invalid characters', async () => {
const service = new S3Service({root: testRoot, buckets: []}, mockLogger);
await service.initialize();
await expect(service.createBucket('UPPERCASE')).rejects.toThrow(S3Error);
await expect(service.createBucket('under_score')).rejects.toThrow(S3Error);
});
});
});

View File

@@ -0,0 +1,133 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {escapeXml, formatAmzDate, formatISODate, parseAmzDate, xmlHeader, xmlTag} from '@fluxer/s3/src/utils/XmlUtils';
import {describe, expect, it} from 'vitest';
describe('escapeXml', () => {
it('should escape ampersand', () => {
expect(escapeXml('a & b')).toBe('a &amp; b');
});
it('should escape less than', () => {
expect(escapeXml('a < b')).toBe('a &lt; b');
});
it('should escape greater than', () => {
expect(escapeXml('a > b')).toBe('a &gt; b');
});
it('should escape double quotes', () => {
expect(escapeXml('a "b" c')).toBe('a &quot;b&quot; c');
});
it('should escape single quotes', () => {
expect(escapeXml("a 'b' c")).toBe('a &apos;b&apos; c');
});
it('should escape all special characters', () => {
expect(escapeXml('<tag attr="value">a & b</tag>')).toBe('&lt;tag attr=&quot;value&quot;&gt;a &amp; b&lt;/tag&gt;');
});
});
// xmlTag renders <name>value</name>; nullish values produce an empty
// string, and values are escaped unless the third argument is false.
describe('xmlTag', () => {
it('should create XML tag with string value', () => {
expect(xmlTag('Name', 'test')).toBe('<Name>test</Name>');
});
it('should create XML tag with number value', () => {
expect(xmlTag('Count', 42)).toBe('<Count>42</Count>');
});
it('should create XML tag with boolean value', () => {
expect(xmlTag('Active', true)).toBe('<Active>true</Active>');
});
// Nullish values collapse to '' so optional tags can simply be omitted.
it('should return empty string for undefined value', () => {
expect(xmlTag('Empty', undefined)).toBe('');
});
it('should return empty string for null value', () => {
expect(xmlTag('Empty', null)).toBe('');
});
it('should escape value by default', () => {
expect(xmlTag('Key', '<test>')).toBe('<Key>&lt;test&gt;</Key>');
});
// Passing false as the third argument disables escaping.
it('should not escape value when shouldEscape is false', () => {
expect(xmlTag('Key', '<test>', false)).toBe('<Key><test></Key>');
});
});
// xmlHeader emits the standard XML declaration with a trailing newline.
describe('xmlHeader', () => {
it('should return XML declaration', () => {
expect(xmlHeader()).toBe('<?xml version="1.0" encoding="UTF-8"?>\n');
});
});
// Date helpers: ISO-8601 formatting, plus the compact AMZ form
// (YYYYMMDD'T'HHMMSS'Z') for formatting and parsing.
describe('S3 date utilities', () => {
describe('formatISODate', () => {
it('should format date in ISO format', () => {
const date = new Date('2024-01-15T12:30:45.000Z');
expect(formatISODate(date)).toBe('2024-01-15T12:30:45.000Z');
});
it('should handle dates with milliseconds', () => {
const date = new Date('2024-06-01T00:00:00.123Z');
expect(formatISODate(date)).toBe('2024-06-01T00:00:00.123Z');
});
});
describe('formatAmzDate', () => {
it('should format date in AMZ format (compact ISO)', () => {
const date = new Date('2024-01-15T12:30:45.000Z');
expect(formatAmzDate(date)).toBe('20240115T123045Z');
});
// Milliseconds are truncated, not rounded (.999 still yields ...45Z).
it('should remove milliseconds', () => {
const date = new Date('2024-01-15T12:30:45.999Z');
expect(formatAmzDate(date)).toBe('20240115T123045Z');
});
});
describe('parseAmzDate', () => {
it('should parse valid AMZ date format', () => {
const result = parseAmzDate('20240115T123045Z');
expect(result).toBeInstanceOf(Date);
expect(result?.toISOString()).toBe('2024-01-15T12:30:45.000Z');
});
// Only the compact form is accepted; extended ISO strings return null.
it('should return null for invalid format', () => {
expect(parseAmzDate('2024-01-15T12:30:45Z')).toBeNull();
expect(parseAmzDate('invalid')).toBeNull();
expect(parseAmzDate('')).toBeNull();
});
it('should handle midnight', () => {
const result = parseAmzDate('20240101T000000Z');
expect(result?.toISOString()).toBe('2024-01-01T00:00:00.000Z');
});
it('should handle end of year', () => {
const result = parseAmzDate('20241231T235959Z');
expect(result?.toISOString()).toBe('2024-12-31T23:59:59.000Z');
});
});
});

View File

@@ -0,0 +1,25 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
/**
 * Rate-limit settings applied to incoming S3 API requests.
 */
export interface S3RateLimitConfig {
/** Master switch: when false, no rate limiting is applied. */
enabled: boolean;
/** Maximum number of requests allowed within one window. */
maxAttempts: number;
/** Length of the rate-limit window, in milliseconds. */
windowMs: number;
/** Request paths exempt from rate limiting; consumers default this to ['/_health'] when unset. */
skipPaths?: Array<string>;
}

View File

@@ -0,0 +1,71 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {createErrorHandler} from '@fluxer/errors/src/ErrorHandler';
import type {LoggerInterface} from '@fluxer/logger/src/LoggerInterface';
import {S3Error} from '@fluxer/s3/src/errors/S3Error';
import {captureException} from '@fluxer/sentry/src/Sentry';
import type {Hono} from 'hono';
import type {HonoEnv} from '../types/HonoEnv';
/** Dependencies for installing S3 error handling on a Hono app. */
interface SetupS3ErrorHandlingOptions {
/** App to register the onError and notFound handlers on. */
app: Hono<HonoEnv>;
/** Logger used to record every request error. */
logger: LoggerInterface;
}
export function setupS3ErrorHandling(options: SetupS3ErrorHandlingOptions): void {
const {app, logger} = options;
const errorHandler = createErrorHandler({
includeStack: false,
responseFormat: 'xml',
logError: (err, ctx) => {
const isExpectedError = err instanceof Error && 'isExpected' in err && err.isExpected;
if (!(err instanceof S3Error || isExpectedError)) {
captureException(err);
}
logger.error(
{
error: err.message,
stack: err.stack,
requestId: ctx.get('requestId'),
},
'Request error',
);
},
customHandler: (err, ctx) => {
if (err instanceof S3Error) {
err.requestId = ctx.get('requestId');
return err.getResponse();
}
return undefined;
},
});
app.onError(errorHandler);
app.notFound((ctx) => {
const s3Error = new S3Error('NoSuchKey', 'The specified resource was not found.', {
requestId: ctx.get('requestId'),
});
return s3Error.getResponse();
});
}

View File

@@ -0,0 +1,89 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {Headers} from '@fluxer/constants/src/Headers';
import {applyMiddlewareStack} from '@fluxer/hono/src/middleware/MiddlewareStack';
import type {MetricsCollector} from '@fluxer/hono_types/src/MetricsTypes';
import type {TracingOptions} from '@fluxer/hono_types/src/TracingTypes';
import type {LoggerInterface} from '@fluxer/logger/src/LoggerInterface';
import type {RateLimitService} from '@fluxer/rate_limit/src/RateLimitService';
import type {S3RateLimitConfig} from '@fluxer/s3/src/app/S3AppConfigTypes';
import type {S3AuthConfig} from '@fluxer/s3/src/middleware/S3AuthMiddleware';
import {createS3AuthMiddleware} from '@fluxer/s3/src/middleware/S3AuthMiddleware';
import type {IS3Service} from '@fluxer/s3/src/s3/S3Service';
import type {HonoEnv} from '@fluxer/s3/src/types/HonoEnv';
import type {Hono} from 'hono';
import {createMiddleware} from 'hono/factory';
/** Dependencies and optional features for the S3 middleware stack. */
interface SetupS3MiddlewareOptions {
/** App the middleware stack is applied to. */
app: Hono<HonoEnv>;
logger: LoggerInterface;
/** Service injected into every request context as 's3Service'. */
s3Service: IS3Service;
/** Configuration for the S3 auth middleware. */
authConfig: S3AuthConfig;
/** When set, request metrics are collected ('/_health' is skipped). */
metricsCollector?: MetricsCollector;
tracing?: TracingOptions;
/** Rate limiting is installed only when both service and an enabled config are provided. */
rateLimitService?: RateLimitService | null;
rateLimitConfig?: S3RateLimitConfig | null;
}
/**
 * Applies the shared middleware stack to the S3 app: request-id header
 * (x-amz-request-id), optional tracing/metrics/rate limiting, request
 * logging, service injection, and S3 auth. Error handling is skipped here
 * because setupS3ErrorHandling installs its own handler.
 */
export function setupS3Middleware(options: SetupS3MiddlewareOptions): void {
  const {app, logger, s3Service, authConfig, metricsCollector, tracing, rateLimitService, rateLimitConfig} = options;

  // Makes the S3 service available on every request context.
  const injectS3Service = createMiddleware<HonoEnv>(async (ctx, next) => {
    ctx.set('s3Service', s3Service);
    await next();
  });

  // Metrics are only wired up when a collector was supplied.
  const metrics = metricsCollector
    ? {
        enabled: true,
        collector: metricsCollector,
        skipPaths: ['/_health'],
      }
    : undefined;

  // Rate limiting requires both a service and an enabled config.
  const rateLimit =
    rateLimitService && rateLimitConfig?.enabled
      ? {
          enabled: true,
          service: rateLimitService,
          maxAttempts: rateLimitConfig.maxAttempts,
          windowMs: rateLimitConfig.windowMs,
          skipPaths: rateLimitConfig.skipPaths ?? ['/_health'],
        }
      : undefined;

  applyMiddlewareStack(app, {
    requestId: {headerName: Headers.X_AMZ_REQUEST_ID},
    tracing,
    metrics,
    logger: {
      log: (data) => {
        logger.info(
          {
            method: data.method,
            path: data.path,
            status: data.status,
            durationMs: data.durationMs,
          },
          'Request completed',
        );
      },
    },
    rateLimit,
    customMiddleware: [injectS3Service, createS3AuthMiddleware(authConfig, logger)],
    skipErrorHandler: true,
  });
}

View File

@@ -0,0 +1,60 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import type {ICacheService} from '@fluxer/cache/src/ICacheService';
import {KVCacheProvider} from '@fluxer/cache/src/providers/KVCacheProvider';
import type {IKVProvider} from '@fluxer/kv_client/src/IKVProvider';
import {KVClient} from '@fluxer/kv_client/src/KVClient';
import {throwKVRequiredError} from '@fluxer/rate_limit/src/KVRequiredError';
import {RateLimitService} from '@fluxer/rate_limit/src/RateLimitService';
import type {S3RateLimitConfig} from '@fluxer/s3/src/app/S3AppConfigTypes';
/** Inputs for deciding which rate-limit service (if any) the S3 app uses. */
interface ResolveS3RateLimitServiceOptions {
/** KV endpoint used to build a backing store when no service is injected. */
kvUrl?: string;
/** Pre-built service; when present it is returned as-is. */
rateLimitService?: RateLimitService | null;
/** Rate-limit settings; when disabled or absent, no service is created. */
rateLimitConfig?: S3RateLimitConfig | null;
}
export function resolveS3RateLimitService(options: ResolveS3RateLimitServiceOptions): RateLimitService | null {
const {kvUrl, rateLimitService, rateLimitConfig} = options;
if (rateLimitService) {
return rateLimitService;
}
if (!rateLimitConfig?.enabled) {
return null;
}
if (!kvUrl) {
throwKVRequiredError({
serviceName: 'S3 service',
configPath: 'kvUrl option',
fluxerServerHint: 'rateLimitService is passed in as an option',
});
}
const kvProvider = createKVProvider(kvUrl);
const cacheService: ICacheService = new KVCacheProvider({client: kvProvider});
return new RateLimitService(cacheService);
}
function createKVProvider(kvUrl: string): IKVProvider {
return new KVClient({url: kvUrl});
}

View File

@@ -0,0 +1,50 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {Headers} from '@fluxer/constants/src/Headers';
import type {HonoEnv} from '@fluxer/s3/src/types/HonoEnv';
import type {Hono} from 'hono';
/**
 * Install response-header middleware on the S3 app.
 *
 * Every response is stamped with the request id (x-amz-id-2) and a Server
 * header. When the request carries an Origin header, permissive CORS
 * headers are mirrored back. A catch-all OPTIONS handler answers preflight
 * requests with an empty 200.
 */
export function setupS3ResponseHeadersMiddleware(app: Hono<HonoEnv>): void {
  app.use('*', async (ctx, next) => {
    // Headers are attached after downstream handlers ran so they apply to
    // every response, including errors surfaced later in the chain.
    await next();
    ctx.header(Headers.X_AMZ_ID_2, ctx.get('requestId'));
    ctx.header('Server', 'FluxerS3');
    const requestOrigin = ctx.req.header('origin');
    if (!requestOrigin) {
      return;
    }
    // CORS: reflect the caller's origin and advertise the S3-relevant
    // methods/headers.
    const corsHeaders: Record<string, string> = {
      'Access-Control-Allow-Origin': requestOrigin,
      'Access-Control-Allow-Methods': 'GET, PUT, POST, DELETE, HEAD, OPTIONS',
      'Access-Control-Allow-Headers':
        'Authorization, Content-Type, X-Amz-Date, X-Amz-Content-Sha256, X-Amz-User-Agent, X-Amz-Security-Token, X-Amz-Meta-*',
      'Access-Control-Expose-Headers': 'ETag, x-amz-request-id, x-amz-id-2, x-amz-version-id, x-amz-delete-marker',
      'Access-Control-Max-Age': '3600',
    };
    for (const [name, value] of Object.entries(corsHeaders)) {
      ctx.header(name, value);
    }
  });
  // Preflight requests get an empty 200; the middleware above attaches the
  // CORS headers after this handler returns.
  app.options('*', (ctx) => ctx.body(null, 200));
}

View File

@@ -0,0 +1,32 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {BucketController} from '@fluxer/s3/src/s3/BucketController';
import {ObjectController} from '@fluxer/s3/src/s3/ObjectController';
import type {HonoEnv} from '@fluxer/s3/src/types/HonoEnv';
import type {Hono} from 'hono';
/**
 * Attach all S3 HTTP routes to the app.
 *
 * The unauthenticated /_health probe is registered first, then object
 * routes, then bucket routes — preserving the original mount order.
 */
export function registerS3Routes(app: Hono<HonoEnv>): void {
  app.get('/_health', (ctx) => ctx.json({ok: true}, 200));
  const controllers = [ObjectController, BucketController];
  for (const mount of controllers) {
    mount(app);
  }
}

View File

@@ -0,0 +1,313 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {HttpStatus} from '@fluxer/constants/src/HttpConstants';
import {FluxerError, type FluxerErrorStatus} from '@fluxer/errors/src/FluxerError';
/**
 * Error codes defined by the Amazon S3 protocol that this service can emit.
 * Each code maps to an HTTP status via S3_ERROR_STATUS_MAP in this file.
 */
export type S3ErrorCode =
  | 'AccessDenied'
  | 'AccountProblem'
  | 'BucketAlreadyExists'
  | 'BucketAlreadyOwnedByYou'
  | 'BucketNotEmpty'
  | 'EntityTooLarge'
  | 'EntityTooSmall'
  | 'ExpiredToken'
  | 'IllegalVersioningConfigurationException'
  | 'IncompleteBody'
  | 'IncorrectNumberOfFilesInPostRequest'
  | 'InlineDataTooLarge'
  | 'InternalError'
  | 'InvalidAccessKeyId'
  | 'InvalidArgument'
  | 'InvalidBucketName'
  | 'InvalidBucketState'
  | 'InvalidDigest'
  | 'InvalidLocationConstraint'
  | 'InvalidObjectState'
  | 'InvalidPart'
  | 'InvalidPartOrder'
  | 'InvalidRange'
  | 'InvalidRequest'
  | 'InvalidSecurity'
  | 'InvalidStorageClass'
  | 'InvalidTargetBucketForLogging'
  | 'InvalidToken'
  | 'InvalidURI'
  | 'KeyTooLongError'
  | 'MalformedACLError'
  | 'MalformedPOSTRequest'
  | 'MalformedXML'
  | 'MaxMessageLengthExceeded'
  | 'MaxPostPreDataLengthExceededError'
  | 'MetadataTooLarge'
  | 'MethodNotAllowed'
  | 'MissingContentLength'
  | 'MissingRequestBodyError'
  | 'MissingSecurityElement'
  | 'MissingSecurityHeader'
  | 'NoLoggingStatusForKey'
  | 'NoSuchBucket'
  | 'NoSuchBucketPolicy'
  | 'NoSuchKey'
  | 'NoSuchLifecycleConfiguration'
  | 'NoSuchUpload'
  | 'NoSuchVersion'
  | 'NotImplemented'
  | 'NotSignedUp'
  | 'OperationAborted'
  | 'PermanentRedirect'
  | 'PreconditionFailed'
  | 'Redirect'
  | 'RequestHeaderSectionTooLarge'
  | 'RequestIsNotMultiPartContent'
  | 'RequestTimeout'
  | 'RequestTimeTooSkewed'
  | 'RequestTorrentOfBucketError'
  | 'SignatureDoesNotMatch'
  | 'ServiceUnavailable'
  | 'SlowDown'
  | 'TemporaryRedirect'
  | 'TokenRefreshRequired'
  | 'TooManyBuckets'
  | 'UnexpectedContent'
  | 'UnresolvableGrantByEmailAddress'
  | 'UserKeyMustBeSpecified';
/**
 * HTTP status returned for each S3 error code. S3Error's constructor reads
 * this map so every code always carries its canonical status.
 */
const S3_ERROR_STATUS_MAP: Record<S3ErrorCode, FluxerErrorStatus> = {
  AccessDenied: HttpStatus.FORBIDDEN,
  AccountProblem: HttpStatus.FORBIDDEN,
  BucketAlreadyExists: HttpStatus.CONFLICT,
  BucketAlreadyOwnedByYou: HttpStatus.CONFLICT,
  BucketNotEmpty: HttpStatus.CONFLICT,
  EntityTooLarge: HttpStatus.BAD_REQUEST,
  EntityTooSmall: HttpStatus.BAD_REQUEST,
  ExpiredToken: HttpStatus.BAD_REQUEST,
  IllegalVersioningConfigurationException: HttpStatus.BAD_REQUEST,
  IncompleteBody: HttpStatus.BAD_REQUEST,
  IncorrectNumberOfFilesInPostRequest: HttpStatus.BAD_REQUEST,
  InlineDataTooLarge: HttpStatus.BAD_REQUEST,
  InternalError: HttpStatus.INTERNAL_SERVER_ERROR,
  InvalidAccessKeyId: HttpStatus.FORBIDDEN,
  InvalidArgument: HttpStatus.BAD_REQUEST,
  InvalidBucketName: HttpStatus.BAD_REQUEST,
  InvalidBucketState: HttpStatus.CONFLICT,
  InvalidDigest: HttpStatus.BAD_REQUEST,
  InvalidLocationConstraint: HttpStatus.BAD_REQUEST,
  InvalidObjectState: HttpStatus.FORBIDDEN,
  InvalidPart: HttpStatus.BAD_REQUEST,
  InvalidPartOrder: HttpStatus.BAD_REQUEST,
  InvalidRange: HttpStatus.RANGE_NOT_SATISFIABLE,
  InvalidRequest: HttpStatus.BAD_REQUEST,
  InvalidSecurity: HttpStatus.FORBIDDEN,
  InvalidStorageClass: HttpStatus.BAD_REQUEST,
  InvalidTargetBucketForLogging: HttpStatus.BAD_REQUEST,
  InvalidToken: HttpStatus.BAD_REQUEST,
  InvalidURI: HttpStatus.BAD_REQUEST,
  KeyTooLongError: HttpStatus.BAD_REQUEST,
  MalformedACLError: HttpStatus.BAD_REQUEST,
  MalformedPOSTRequest: HttpStatus.BAD_REQUEST,
  MalformedXML: HttpStatus.BAD_REQUEST,
  MaxMessageLengthExceeded: HttpStatus.BAD_REQUEST,
  MaxPostPreDataLengthExceededError: HttpStatus.BAD_REQUEST,
  MetadataTooLarge: HttpStatus.BAD_REQUEST,
  MethodNotAllowed: HttpStatus.METHOD_NOT_ALLOWED,
  MissingContentLength: HttpStatus.LENGTH_REQUIRED,
  MissingRequestBodyError: HttpStatus.BAD_REQUEST,
  MissingSecurityElement: HttpStatus.BAD_REQUEST,
  MissingSecurityHeader: HttpStatus.BAD_REQUEST,
  NoLoggingStatusForKey: HttpStatus.BAD_REQUEST,
  NoSuchBucket: HttpStatus.NOT_FOUND,
  NoSuchBucketPolicy: HttpStatus.NOT_FOUND,
  NoSuchKey: HttpStatus.NOT_FOUND,
  NoSuchLifecycleConfiguration: HttpStatus.NOT_FOUND,
  NoSuchUpload: HttpStatus.NOT_FOUND,
  NoSuchVersion: HttpStatus.NOT_FOUND,
  NotImplemented: HttpStatus.NOT_IMPLEMENTED,
  NotSignedUp: HttpStatus.FORBIDDEN,
  OperationAborted: HttpStatus.CONFLICT,
  PermanentRedirect: HttpStatus.MOVED_PERMANENTLY,
  PreconditionFailed: HttpStatus.PRECONDITION_FAILED,
  Redirect: HttpStatus.TEMPORARY_REDIRECT,
  RequestHeaderSectionTooLarge: HttpStatus.BAD_REQUEST,
  RequestIsNotMultiPartContent: HttpStatus.BAD_REQUEST,
  RequestTimeout: HttpStatus.REQUEST_TIMEOUT,
  RequestTimeTooSkewed: HttpStatus.FORBIDDEN,
  RequestTorrentOfBucketError: HttpStatus.BAD_REQUEST,
  SignatureDoesNotMatch: HttpStatus.FORBIDDEN,
  ServiceUnavailable: HttpStatus.SERVICE_UNAVAILABLE,
  SlowDown: HttpStatus.SERVICE_UNAVAILABLE,
  TemporaryRedirect: HttpStatus.TEMPORARY_REDIRECT,
  TokenRefreshRequired: HttpStatus.BAD_REQUEST,
  TooManyBuckets: HttpStatus.BAD_REQUEST,
  UnexpectedContent: HttpStatus.BAD_REQUEST,
  UnresolvableGrantByEmailAddress: HttpStatus.BAD_REQUEST,
  UserKeyMustBeSpecified: HttpStatus.BAD_REQUEST,
};
/**
 * S3-protocol error that renders itself as the standard S3 <Error> XML body.
 *
 * The HTTP status is derived from the code via S3_ERROR_STATUS_MAP;
 * resource and requestId are optional and only serialized when present.
 */
export class S3Error extends FluxerError {
  readonly resource?: string;
  requestId?: string;

  constructor(
    code: S3ErrorCode,
    message: string,
    options?: {
      resource?: string;
      requestId?: string;
    },
  ) {
    super({
      code,
      message,
      status: S3_ERROR_STATUS_MAP[code],
    });
    this.name = 'S3Error';
    // Assign only when supplied so the properties stay absent otherwise.
    if (options?.resource !== undefined) {
      this.resource = options.resource;
    }
    if (options?.requestId !== undefined) {
      this.requestId = options.requestId;
    }
  }

  /** Build the HTTP response carrying the XML error document. */
  override getResponse(): Response {
    const headers = {
      'Content-Type': 'application/xml',
      'x-amz-request-id': this.requestId ?? 'unknown',
    };
    return new Response(this.toXml(), {status: this.status, headers});
  }

  /** Serialize to the S3 <Error> XML document (no trailing newline). */
  toXml(): string {
    const lines = ['<?xml version="1.0" encoding="UTF-8"?>', '<Error>'];
    lines.push(`  <Code>${this.code}</Code>`);
    lines.push(`  <Message>${escapeXml(this.message)}</Message>`);
    if (this.resource) {
      lines.push(`  <Resource>${escapeXml(this.resource)}</Resource>`);
    }
    if (this.requestId) {
      lines.push(`  <RequestId>${escapeXml(this.requestId)}</RequestId>`);
    }
    lines.push('</Error>');
    return lines.join('\n');
  }
}
/**
 * Escape the five XML-significant characters in a string.
 * Single-pass replacement: inserted entities are never re-scanned, matching
 * the original replace chain that handled '&' first.
 */
function escapeXml(str: string): string {
  const entities: Record<string, string> = {
    '&': '&amp;',
    '<': '&lt;',
    '>': '&gt;',
    '"': '&quot;',
    "'": '&apos;',
  };
  return str.replace(/[&<>"']/g, (ch) => entities[ch] ?? ch);
}
/**
 * Factory helpers for the common S3 error responses. Each returns a fresh
 * S3Error carrying the canonical AWS message text; callers throw the result.
 * Underscore-prefixed parameters are accepted for interface stability but
 * intentionally unused.
 */
export const S3Errors = {
  accessDenied: (message = 'Access Denied', resource?: string) =>
    new S3Error('AccessDenied', message, resource !== undefined ? {resource} : undefined),
  bucketAlreadyExists: (bucket: string) =>
    new S3Error(
      'BucketAlreadyExists',
      'The requested bucket name is not available. The bucket namespace is shared by all users of the system. Please select a different name and try again.',
      {resource: bucket},
    ),
  bucketAlreadyOwnedByYou: (bucket: string) =>
    new S3Error(
      'BucketAlreadyOwnedByYou',
      'Your previous request to create the named bucket succeeded and you already own it.',
      {resource: bucket},
    ),
  bucketNotEmpty: (bucket: string) =>
    new S3Error('BucketNotEmpty', 'The bucket you tried to delete is not empty.', {resource: bucket}),
  internalError: (message = 'We encountered an internal error. Please try again.') =>
    new S3Error('InternalError', message),
  invalidAccessKeyId: () =>
    new S3Error('InvalidAccessKeyId', 'The AWS Access Key Id you provided does not exist in our records.'),
  invalidArgument: (message: string, resource?: string) =>
    new S3Error('InvalidArgument', message, resource !== undefined ? {resource} : undefined),
  invalidBucketName: (bucket: string) =>
    new S3Error('InvalidBucketName', 'The specified bucket is not valid.', {resource: bucket}),
  invalidDigest: () => new S3Error('InvalidDigest', 'The Content-MD5 you specified was invalid.'),
  invalidPart: () => new S3Error('InvalidPart', 'One or more of the specified parts could not be found.'),
  invalidPartOrder: () => new S3Error('InvalidPartOrder', 'The list of parts was not in ascending order.'),
  invalidRange: () => new S3Error('InvalidRange', 'The requested range is not satisfiable.'),
  malformedXml: (
    message = 'The XML you provided was not well-formed or did not validate against our published schema.',
  ) => new S3Error('MalformedXML', message),
  methodNotAllowed: (_method: string, resource?: string) =>
    new S3Error(
      'MethodNotAllowed',
      'The specified method is not allowed against this resource.',
      resource !== undefined ? {resource} : undefined,
    ),
  missingContentLength: () => new S3Error('MissingContentLength', 'You must provide the Content-Length HTTP header.'),
  noSuchBucket: (bucket: string) =>
    new S3Error('NoSuchBucket', 'The specified bucket does not exist.', {resource: bucket}),
  noSuchKey: (key: string) => new S3Error('NoSuchKey', 'The specified key does not exist.', {resource: key}),
  noSuchUpload: (uploadId: string) =>
    new S3Error(
      'NoSuchUpload',
      'The specified multipart upload does not exist. The upload ID may be invalid, or the upload may have been aborted or completed.',
      {resource: uploadId},
    ),
  notImplemented: (message = 'A header you provided implies functionality that is not implemented.') =>
    new S3Error('NotImplemented', message),
  preconditionFailed: (_condition: string) =>
    new S3Error('PreconditionFailed', 'At least one of the preconditions you specified did not hold.'),
  requestTimeTooSkewed: () =>
    new S3Error('RequestTimeTooSkewed', 'The difference between the request time and the current time is too large.'),
  signatureDoesNotMatch: () =>
    new S3Error(
      'SignatureDoesNotMatch',
      'The request signature we calculated does not match the signature you provided.',
    ),
  entityTooLarge: (_maxSize: number) =>
    new S3Error('EntityTooLarge', 'Your proposed upload exceeds the maximum allowed object size.'),
  entityTooSmall: () =>
    new S3Error('EntityTooSmall', 'Your proposed upload is smaller than the minimum allowed object size.'),
};

View File

@@ -0,0 +1,52 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import type {LoggerInterface} from '@fluxer/logger/src/LoggerInterface';
import {S3Errors} from '@fluxer/s3/src/errors/S3Error';
import {authenticateS3Request} from '@fluxer/s3/src/middleware/S3RequestAuthenticator';
import type {HonoEnv} from '@fluxer/s3/src/types/HonoEnv';
import type {MiddlewareHandler} from 'hono';
/**
 * Static credentials for the single-principal S3 auth middleware.
 * Both fields must be set at runtime; the middleware rejects all
 * non-health requests when either is missing.
 */
export interface S3AuthConfig {
  accessKey?: string;
  secretKey?: string;
}
/**
 * Build Hono middleware enforcing SigV4 authentication on every route
 * except the /_health probe. On success the principal's access key id and
 * an authenticated flag are stored on the context for downstream handlers.
 */
export function createS3AuthMiddleware(config: S3AuthConfig, logger: LoggerInterface): MiddlewareHandler<HonoEnv> {
  return async (ctx, next) => {
    // Health checks must work without credentials.
    if (ctx.req.path === '/_health') {
      await next();
      return;
    }
    const {accessKey, secretKey} = config;
    if (!accessKey || !secretKey) {
      // Misconfiguration is logged server-side but surfaced to the client
      // as a generic access denial.
      logger.error('S3 credentials not configured');
      throw S3Errors.accessDenied('Service not configured');
    }
    const principal = await authenticateS3Request(ctx, {accessKey, secretKey});
    ctx.set('accessKeyId', principal.accessKeyId);
    ctx.set('authenticated', true);
    await next();
  };
}

View File

@@ -0,0 +1,271 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {timingSafeEqual} from 'node:crypto';
import {S3Errors} from '@fluxer/s3/src/errors/S3Error';
import type {HonoEnv} from '@fluxer/s3/src/types/HonoEnv';
import {hmacSha256, sha256} from '@fluxer/s3/src/utils/Crypto';
import type {Context} from 'hono';
/** Shared-secret credential pair requests must be signed with. */
interface S3AuthCredentials {
  accessKey: string;
  secretKey: string;
}
/** Decomposed SigV4 credential scope: key id / date / region / service. */
interface AwsCredential {
  accessKeyId: string;
  date: string;
  region: string;
  service: string;
}
/** Parsed pieces of a SigV4 Authorization header or presigned URL. */
interface AuthorizationParams {
  credential: AwsCredential;
  signedHeaders: Array<string>;
  signature: string;
}
/** Result handed back to the middleware after successful authentication. */
interface S3AuthenticationResult {
  accessKeyId: string;
}
type S3Context = Context<HonoEnv>;
// Maximum allowed clock skew between client and server: 15 minutes,
// the window AWS SigV4 uses for request timestamps.
const MAX_TIME_SKEW_MS = 15 * 60 * 1000;
/**
 * Authenticate an incoming S3 request.
 *
 * Presigned-URL authentication (X-Amz-Algorithm query parameter) takes
 * precedence; otherwise a SigV4 Authorization header is required. Requests
 * presenting neither are rejected with AccessDenied.
 */
export async function authenticateS3Request(
  ctx: S3Context,
  credentials: S3AuthCredentials,
): Promise<S3AuthenticationResult> {
  const requestUrl = new URL(ctx.req.url);
  const isPresigned = requestUrl.searchParams.get('X-Amz-Algorithm') === 'AWS4-HMAC-SHA256';
  if (isPresigned) {
    return verifyPresignedUrl(ctx, requestUrl, credentials);
  }
  const authHeader = ctx.req.header('authorization');
  if (authHeader?.startsWith('AWS4-HMAC-SHA256')) {
    return verifyAuthorizationHeader(ctx, authHeader, credentials);
  }
  throw S3Errors.accessDenied('No valid authentication provided');
}
/**
 * Verify a header-based SigV4 request: the access key must match, the
 * X-Amz-Date must be well-formed and within the allowed clock-skew window,
 * and the computed signature must equal the one supplied.
 *
 * @throws S3Error (InvalidAccessKeyId / InvalidArgument / AccessDenied /
 *         SignatureDoesNotMatch) on any failed check.
 */
async function verifyAuthorizationHeader(
  ctx: S3Context,
  authHeader: string,
  credentials: S3AuthCredentials,
): Promise<S3AuthenticationResult> {
  const params = parseAuthorizationHeader(authHeader);
  if (params.credential.accessKeyId !== credentials.accessKey) {
    throw S3Errors.invalidAccessKeyId();
  }
  const amzDate = ctx.req.header('x-amz-date');
  if (!amzDate) {
    throw S3Errors.invalidArgument('Missing X-Amz-Date header');
  }
  const requestTime = parseAmzDateToMs(amzDate);
  // BUG FIX: a malformed date yields NaN, and every NaN comparison is
  // false — the original `Math.abs(now - NaN) > MAX` silently skipped the
  // skew check. Reject non-finite timestamps explicitly.
  if (!Number.isFinite(requestTime)) {
    throw S3Errors.invalidArgument('Invalid X-Amz-Date header');
  }
  const now = Date.now();
  if (Math.abs(now - requestTime) > MAX_TIME_SKEW_MS) {
    throw S3Errors.accessDenied('Request timestamp is outside the allowed time window');
  }
  const isValid = await verifySignature(ctx, params, amzDate, credentials.secretKey, false);
  if (!isValid) {
    throw S3Errors.signatureDoesNotMatch();
  }
  return {accessKeyId: params.credential.accessKeyId};
}
/**
 * Verify a presigned-URL (query-string) SigV4 request.
 *
 * Checks, in order: required query parameters are present, the credential
 * scope is well-formed and ends with the fixed aws4_request terminator,
 * the key id matches, the URL has not expired, and the signature verifies
 * against an UNSIGNED-PAYLOAD canonical request.
 *
 * @throws S3Error (InvalidArgument / InvalidAccessKeyId / AccessDenied /
 *         SignatureDoesNotMatch) on any failed check.
 */
async function verifyPresignedUrl(
  ctx: S3Context,
  url: URL,
  credentials: S3AuthCredentials,
): Promise<S3AuthenticationResult> {
  const credential = url.searchParams.get('X-Amz-Credential');
  const signedHeaders = url.searchParams.get('X-Amz-SignedHeaders');
  const signature = url.searchParams.get('X-Amz-Signature');
  const amzDate = url.searchParams.get('X-Amz-Date');
  const expires = url.searchParams.get('X-Amz-Expires');
  if (!credential || !signedHeaders || !signature || !amzDate) {
    throw S3Errors.invalidArgument('Missing presigned URL parameters');
  }
  const credentialParts = credential.split('/');
  if (credentialParts.length !== 5) {
    throw S3Errors.invalidArgument('Invalid credential format');
  }
  const [accessKeyId, date, region, service, terminator] = credentialParts;
  // CONSISTENCY FIX: the header-auth path (parseAuthorizationHeader)
  // rejects scopes that do not end in 'aws4_request'; do the same here.
  if (terminator !== 'aws4_request') {
    throw S3Errors.invalidArgument('Invalid credential terminator');
  }
  if (accessKeyId !== credentials.accessKey) {
    throw S3Errors.invalidAccessKeyId();
  }
  if (expires) {
    const expiresSeconds = parseInt(expires, 10);
    // BUG FIX: a non-numeric X-Amz-Expires produced NaN, and NaN
    // comparisons are always false, silently disabling expiry enforcement.
    if (!Number.isFinite(expiresSeconds)) {
      throw S3Errors.invalidArgument('Invalid X-Amz-Expires value');
    }
    const requestDate = parseAmzDateToMs(amzDate);
    if (Date.now() > requestDate + expiresSeconds * 1000) {
      throw S3Errors.accessDenied('Request has expired');
    }
  }
  const params: AuthorizationParams = {
    credential: {
      accessKeyId: accessKeyId!,
      date: date!,
      region: region!,
      service: service!,
    },
    signedHeaders: signedHeaders.split(';'),
    signature,
  };
  const isValid = await verifySignature(ctx, params, amzDate, credentials.secretKey, true);
  if (!isValid) {
    throw S3Errors.signatureDoesNotMatch();
  }
  return {accessKeyId};
}
/**
 * Parse a SigV4 Authorization header of the form
 * `AWS4-HMAC-SHA256 Credential=<scope>, SignedHeaders=<h1;h2>, Signature=<hex>`
 * into its structured parts.
 *
 * @throws S3Error (InvalidArgument) on any malformation.
 */
function parseAuthorizationHeader(header: string): AuthorizationParams {
  const authPattern = /^AWS4-HMAC-SHA256\s+Credential=([^,]+),\s*SignedHeaders=([^,]+),\s*Signature=([a-fA-F0-9]+)$/;
  const match = authPattern.exec(header);
  if (!match) {
    throw S3Errors.invalidArgument('Invalid Authorization header format');
  }
  const credentialStr = match[1]!;
  const signedHeadersStr = match[2]!;
  const signature = match[3]!;
  // The credential scope is exactly five slash-separated fields ending in
  // the fixed terminator.
  const scopeParts = credentialStr.split('/');
  if (scopeParts.length !== 5) {
    throw S3Errors.invalidArgument('Invalid credential format');
  }
  const [accessKeyId, date, region, service, terminator] = scopeParts;
  if (terminator !== 'aws4_request') {
    throw S3Errors.invalidArgument('Invalid credential terminator');
  }
  return {
    credential: {
      accessKeyId: accessKeyId!,
      date: date!,
      region: region!,
      service: service!,
    },
    signedHeaders: signedHeadersStr.split(';'),
    signature,
  };
}
/**
 * Parse an AWS "amz-date" timestamp (YYYYMMDD'T'HHMMSS'Z') into epoch
 * milliseconds (UTC).
 *
 * Returns NaN when the input does not match the expected format. The
 * previous slice-based parser never checked the 'T'/'Z' separators or the
 * string length, so strings with arbitrary characters in those positions
 * parsed as plausible timestamps; callers treat a NaN result as invalid.
 */
function parseAmzDateToMs(amzDate: string): number {
  const match = /^(\d{4})(\d{2})(\d{2})T(\d{2})(\d{2})(\d{2})Z$/.exec(amzDate);
  if (!match) {
    return Number.NaN;
  }
  const [, year, month, day, hour, minute, second] = match;
  return Date.UTC(
    parseInt(year!, 10),
    parseInt(month!, 10) - 1, // Date.UTC months are 0-based
    parseInt(day!, 10),
    parseInt(hour!, 10),
    parseInt(minute!, 10),
    parseInt(second!, 10),
  );
}
/**
 * Recompute the SigV4 signature for the current request and compare it,
 * in constant time, against the signature the client supplied.
 *
 * Follows the SigV4 derivation: canonical request -> string to sign ->
 * HMAC key chain (date / region / service / aws4_request) -> hex signature.
 * Returns true only when the computed and provided signatures match.
 */
async function verifySignature(
  ctx: S3Context,
  params: AuthorizationParams,
  amzDate: string,
  secretKey: string,
  isPresigned: boolean,
): Promise<boolean> {
  const method = ctx.req.method;
  const url = new URL(ctx.req.url);
  const canonicalUri = url.pathname;
  // Canonical query string: every parameter except the signature itself,
  // sorted by key.
  // NOTE(review): a Map keeps only the last value per key, so repeated
  // query parameters collapse — confirm no caller relies on repeated keys.
  const queryParams = new Map<string, string>();
  url.searchParams.forEach((value, key) => {
    if (key !== 'X-Amz-Signature') {
      queryParams.set(key, value);
    }
  });
  const sortedQueryKeys = Array.from(queryParams.keys()).sort();
  // NOTE(review): encodeURIComponent leaves !'()* unescaped, which differs
  // from strict RFC 3986 encoding used by AWS SDK signers — verify against
  // clients that send such characters in keys or values.
  const canonicalQueryString = sortedQueryKeys
    .map((key) => `${encodeURIComponent(key)}=${encodeURIComponent(queryParams.get(key)!)}`)
    .join('&');
  // Behind a proxy the client signed the original Host, so prefer
  // x-forwarded-host when present.
  const effectiveHost = ctx.req.header('x-forwarded-host') ?? ctx.req.header('host') ?? url.host;
  // Canonical headers: each signed header lowercased with trimmed value;
  // a header the client signed but did not send is a hard error.
  const canonicalHeaders = params.signedHeaders
    .map((header) => {
      const value = ctx.req.header(header) ?? (header.toLowerCase() === 'host' ? effectiveHost : undefined);
      if (value === undefined) {
        throw S3Errors.invalidArgument(`Missing signed header: ${header}`);
      }
      return `${header.toLowerCase()}:${value.trim()}\n`;
    })
    .join('');
  const signedHeadersString = params.signedHeaders.join(';');
  // Payload hash: presigned URLs always sign UNSIGNED-PAYLOAD; header auth
  // uses the client-declared x-amz-content-sha256 when present.
  // NOTE(review): falling back to UNSIGNED-PAYLOAD when that header is
  // absent means the body is never independently hashed here — confirm
  // that is acceptable.
  let payloadHash: string;
  if (isPresigned) {
    payloadHash = 'UNSIGNED-PAYLOAD';
  } else {
    const contentSha256 = ctx.req.header('x-amz-content-sha256');
    if (contentSha256) {
      payloadHash = contentSha256;
    } else {
      payloadHash = 'UNSIGNED-PAYLOAD';
    }
  }
  const canonicalRequest = [
    method,
    canonicalUri,
    canonicalQueryString,
    canonicalHeaders,
    signedHeadersString,
    payloadHash,
  ].join('\n');
  // String to sign over the credential scope, then the SigV4 key chain.
  const dateStamp = params.credential.date;
  const scope = `${dateStamp}/${params.credential.region}/${params.credential.service}/aws4_request`;
  const stringToSign = ['AWS4-HMAC-SHA256', amzDate, scope, sha256(canonicalRequest)].join('\n');
  const kDate = hmacSha256(`AWS4${secretKey}`, dateStamp);
  const kRegion = hmacSha256(kDate, params.credential.region);
  const kService = hmacSha256(kRegion, params.credential.service);
  const kSigning = hmacSha256(kService, 'aws4_request');
  const calculatedSignature = hmacSha256(kSigning, stringToSign).toString('hex');
  // Constant-time comparison; the length check can fail fast because the
  // expected signature length is public knowledge.
  const calculatedBuffer = Buffer.from(calculatedSignature, 'hex');
  const providedBuffer = Buffer.from(params.signature, 'hex');
  if (calculatedBuffer.length !== providedBuffer.length) {
    return false;
  }
  return timingSafeEqual(calculatedBuffer, providedBuffer);
}

View File

@@ -0,0 +1,280 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import type {HonoEnv} from '@fluxer/s3/src/types/HonoEnv';
import {formatISODate, xmlHeader, xmlTag} from '@fluxer/s3/src/utils/XmlUtils';
import type {Hono} from 'hono';
/**
 * Register bucket-level S3 routes:
 *  - GET    /         ListBuckets
 *  - PUT    /:bucket  CreateBucket
 *  - DELETE /:bucket  DeleteBucket
 *  - GET    /:bucket  multiplexer: HEAD, ?location, ?versioning, ?acl,
 *                     ?cors, ?uploads, and ListObjects (v1 and v2)
 * Responses are hand-assembled S3-compatible XML strings; the bucket owner
 * is always reported as the authenticated access key id.
 */
export function BucketController(app: Hono<HonoEnv>) {
  // ListBuckets.
  app.get('/', async (ctx) => {
    const s3Service = ctx.get('s3Service');
    const buckets = await s3Service.listBuckets();
    let xml = xmlHeader();
    xml += '<ListAllMyBucketsResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">\n';
    xml += ' <Owner>\n';
    xml += ` ${xmlTag('ID', ctx.get('accessKeyId') ?? 'anonymous')}\n`;
    xml += ` ${xmlTag('DisplayName', ctx.get('accessKeyId') ?? 'anonymous')}\n`;
    xml += ' </Owner>\n';
    xml += ' <Buckets>\n';
    for (const bucket of buckets) {
      xml += ' <Bucket>\n';
      xml += ` ${xmlTag('Name', bucket.name)}\n`;
      xml += ` ${xmlTag('CreationDate', formatISODate(bucket.creationDate))}\n`;
      xml += ' </Bucket>\n';
    }
    xml += ' </Buckets>\n';
    xml += '</ListAllMyBucketsResult>';
    return ctx.body(xml, 200, {
      'Content-Type': 'application/xml',
    });
  });
  // CreateBucket.
  app.put('/:bucket', async (ctx) => {
    const bucket = ctx.req.param('bucket');
    const s3Service = ctx.get('s3Service');
    await s3Service.createBucket(bucket);
    return ctx.body(null, 200, {
      Location: `/${bucket}`,
    });
  });
  // DeleteBucket.
  app.delete('/:bucket', async (ctx) => {
    const bucket = ctx.req.param('bucket');
    const s3Service = ctx.get('s3Service');
    const url = new URL(ctx.req.url);
    // NOTE(review): a DELETE with ?delete returns an empty 200 without
    // touching storage — presumably to tolerate multi-object-delete
    // probes; confirm this is intentional.
    if (url.searchParams.has('delete')) {
      return ctx.body(null, 200);
    }
    await s3Service.deleteBucket(bucket);
    return ctx.body(null, 204);
  });
  // GET /:bucket multiplexer: dispatches on method and query parameters.
  app.get('/:bucket', async (ctx) => {
    const bucket = ctx.req.param('bucket');
    const url = new URL(ctx.req.url);
    const s3Service = ctx.get('s3Service');
    // HeadBucket: existence check, empty body.
    // NOTE(review): relies on HEAD requests being dispatched to this GET
    // handler by the router — confirm the framework does so.
    if (ctx.req.method === 'HEAD') {
      await s3Service.headBucket(bucket);
      return ctx.body(null, 200);
    }
    // GetBucketLocation: always the default (empty) location constraint.
    if (url.searchParams.has('location')) {
      await s3Service.headBucket(bucket);
      let xml = xmlHeader();
      xml += '<LocationConstraint xmlns="http://s3.amazonaws.com/doc/2006-03-01/"/>';
      return ctx.body(xml, 200, {
        'Content-Type': 'application/xml',
      });
    }
    // GetBucketVersioning: versioning is never enabled here.
    if (url.searchParams.has('versioning')) {
      await s3Service.headBucket(bucket);
      let xml = xmlHeader();
      xml += '<VersioningConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/"/>';
      return ctx.body(xml, 200, {
        'Content-Type': 'application/xml',
      });
    }
    // GetBucketAcl: a fixed FULL_CONTROL grant for the calling principal.
    if (url.searchParams.has('acl')) {
      await s3Service.headBucket(bucket);
      let xml = xmlHeader();
      xml += '<AccessControlPolicy xmlns="http://s3.amazonaws.com/doc/2006-03-01/">\n';
      xml += ' <Owner>\n';
      xml += ` ${xmlTag('ID', ctx.get('accessKeyId') ?? 'anonymous')}\n`;
      xml += ` ${xmlTag('DisplayName', ctx.get('accessKeyId') ?? 'anonymous')}\n`;
      xml += ' </Owner>\n';
      xml += ' <AccessControlList>\n';
      xml += ' <Grant>\n';
      xml += ' <Grantee xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="CanonicalUser">\n';
      xml += ` ${xmlTag('ID', ctx.get('accessKeyId') ?? 'anonymous')}\n`;
      xml += ` ${xmlTag('DisplayName', ctx.get('accessKeyId') ?? 'anonymous')}\n`;
      xml += ' </Grantee>\n';
      xml += ' <Permission>FULL_CONTROL</Permission>\n';
      xml += ' </Grant>\n';
      xml += ' </AccessControlList>\n';
      xml += '</AccessControlPolicy>';
      return ctx.body(xml, 200, {
        'Content-Type': 'application/xml',
      });
    }
    // GetBucketCors: no CORS rules are stored, return an empty config.
    if (url.searchParams.has('cors')) {
      await s3Service.headBucket(bucket);
      let xml = xmlHeader();
      xml += '<CORSConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/"/>';
      return ctx.body(xml, 200, {
        'Content-Type': 'application/xml',
      });
    }
    // ListMultipartUploads.
    if (url.searchParams.has('uploads')) {
      const prefix = url.searchParams.get('prefix') ?? undefined;
      const maxUploads = url.searchParams.get('max-uploads');
      // Spread-conditionals keep absent options off the object entirely.
      const result = await s3Service.listMultipartUploads(bucket, {
        ...(prefix !== undefined && {prefix}),
        ...(maxUploads !== null && {maxUploads: parseInt(maxUploads, 10)}),
      });
      let xml = xmlHeader();
      xml += '<ListMultipartUploadsResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">\n';
      xml += ` ${xmlTag('Bucket', bucket)}\n`;
      if (prefix) {
        xml += ` ${xmlTag('Prefix', prefix)}\n`;
      }
      xml += ` ${xmlTag('MaxUploads', maxUploads ?? '1000')}\n`;
      xml += ` ${xmlTag('IsTruncated', result.isTruncated)}\n`;
      for (const upload of result.uploads) {
        xml += ' <Upload>\n';
        xml += ` ${xmlTag('Key', upload.key)}\n`;
        xml += ` ${xmlTag('UploadId', upload.uploadId)}\n`;
        xml += ' <Initiator>\n';
        xml += ` ${xmlTag('ID', ctx.get('accessKeyId') ?? 'anonymous')}\n`;
        xml += ` ${xmlTag('DisplayName', ctx.get('accessKeyId') ?? 'anonymous')}\n`;
        xml += ' </Initiator>\n';
        xml += ' <Owner>\n';
        xml += ` ${xmlTag('ID', ctx.get('accessKeyId') ?? 'anonymous')}\n`;
        xml += ` ${xmlTag('DisplayName', ctx.get('accessKeyId') ?? 'anonymous')}\n`;
        xml += ' </Owner>\n';
        xml += ' <StorageClass>STANDARD</StorageClass>\n';
        xml += ` ${xmlTag('Initiated', formatISODate(upload.initiated))}\n`;
        xml += ' </Upload>\n';
      }
      xml += '</ListMultipartUploadsResult>';
      return ctx.body(xml, 200, {
        'Content-Type': 'application/xml',
      });
    }
    // ListObjects: parameter parsing shared by the v1 and v2 responses.
    const listType = url.searchParams.get('list-type');
    const prefix = url.searchParams.get('prefix') ?? undefined;
    const delimiter = url.searchParams.get('delimiter') ?? undefined;
    const maxKeys = url.searchParams.get('max-keys');
    const marker = url.searchParams.get('marker') ?? undefined;
    const startAfter = url.searchParams.get('start-after') ?? undefined;
    const continuationToken = url.searchParams.get('continuation-token') ?? undefined;
    // v2 continuation tokens travel base64-encoded on the wire.
    const decodedContinuationToken = continuationToken
      ? Buffer.from(continuationToken, 'base64').toString('utf-8')
      : undefined;
    const result = await s3Service.listObjects(bucket, {
      ...(prefix !== undefined && {prefix}),
      ...(delimiter !== undefined && {delimiter}),
      ...(maxKeys !== null && {maxKeys: parseInt(maxKeys, 10)}),
      ...(marker !== undefined && {marker}),
      ...(startAfter !== undefined && {startAfter}),
      ...(decodedContinuationToken !== undefined && {continuationToken: decodedContinuationToken}),
    });
    // ListObjectsV2 (list-type=2): KeyCount/continuation-token style.
    if (listType === '2') {
      let xml = xmlHeader();
      xml += '<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">\n';
      xml += ` ${xmlTag('Name', bucket)}\n`;
      if (prefix) xml += ` ${xmlTag('Prefix', prefix)}\n`;
      if (delimiter) xml += ` ${xmlTag('Delimiter', delimiter)}\n`;
      xml += ` ${xmlTag('MaxKeys', maxKeys ?? '1000')}\n`;
      xml += ` ${xmlTag('KeyCount', result.keyCount)}\n`;
      xml += ` ${xmlTag('IsTruncated', result.isTruncated)}\n`;
      if (continuationToken) xml += ` ${xmlTag('ContinuationToken', continuationToken)}\n`;
      if (result.nextContinuationToken) {
        xml += ` ${xmlTag('NextContinuationToken', result.nextContinuationToken)}\n`;
      }
      if (startAfter) xml += ` ${xmlTag('StartAfter', startAfter)}\n`;
      for (const obj of result.contents) {
        xml += ' <Contents>\n';
        xml += ` ${xmlTag('Key', obj.key)}\n`;
        xml += ` ${xmlTag('LastModified', formatISODate(new Date(obj.lastModified)))}\n`;
        xml += ` ${xmlTag('ETag', obj.etag)}\n`;
        xml += ` ${xmlTag('Size', obj.size)}\n`;
        xml += ' <StorageClass>STANDARD</StorageClass>\n';
        xml += ' </Contents>\n';
      }
      // Note: this loop variable shadows the outer `prefix` query parameter.
      for (const prefix of result.commonPrefixes) {
        xml += ' <CommonPrefixes>\n';
        xml += ` ${xmlTag('Prefix', prefix)}\n`;
        xml += ' </CommonPrefixes>\n';
      }
      xml += '</ListBucketResult>';
      return ctx.body(xml, 200, {
        'Content-Type': 'application/xml',
      });
    }
    // ListObjects v1: Marker/NextMarker style, with per-object Owner blocks.
    let xml = xmlHeader();
    xml += '<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">\n';
    xml += ` ${xmlTag('Name', bucket)}\n`;
    if (prefix) xml += ` ${xmlTag('Prefix', prefix)}\n`;
    if (marker) xml += ` ${xmlTag('Marker', marker)}\n`;
    if (delimiter) xml += ` ${xmlTag('Delimiter', delimiter)}\n`;
    xml += ` ${xmlTag('MaxKeys', maxKeys ?? '1000')}\n`;
    xml += ` ${xmlTag('IsTruncated', result.isTruncated)}\n`;
    if (result.nextMarker) xml += ` ${xmlTag('NextMarker', result.nextMarker)}\n`;
    for (const obj of result.contents) {
      xml += ' <Contents>\n';
      xml += ` ${xmlTag('Key', obj.key)}\n`;
      xml += ` ${xmlTag('LastModified', formatISODate(new Date(obj.lastModified)))}\n`;
      xml += ` ${xmlTag('ETag', obj.etag)}\n`;
      xml += ` ${xmlTag('Size', obj.size)}\n`;
      xml += ' <Owner>\n';
      xml += ` ${xmlTag('ID', ctx.get('accessKeyId') ?? 'anonymous')}\n`;
      xml += ` ${xmlTag('DisplayName', ctx.get('accessKeyId') ?? 'anonymous')}\n`;
      xml += ' </Owner>\n';
      xml += ' <StorageClass>STANDARD</StorageClass>\n';
      xml += ' </Contents>\n';
    }
    // Note: this loop variable shadows the outer `prefix` query parameter.
    for (const prefix of result.commonPrefixes) {
      xml += ' <CommonPrefixes>\n';
      xml += ` ${xmlTag('Prefix', prefix)}\n`;
      xml += ' </CommonPrefixes>\n';
    }
    xml += '</ListBucketResult>';
    return ctx.body(xml, 200, {
      'Content-Type': 'application/xml',
    });
  });
}

View File

@@ -0,0 +1,427 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {S3Errors} from '@fluxer/s3/src/errors/S3Error';
import type {HonoEnv} from '@fluxer/s3/src/types/HonoEnv';
import {formatISODate, xmlHeader, xmlTag} from '@fluxer/s3/src/utils/XmlUtils';
import type {Hono} from 'hono';
import {stream} from 'hono/streaming';
/**
 * Registers all object-level S3 routes on the app: multi-object delete,
 * multipart uploads (initiate / upload part / list parts / complete / abort),
 * GET/HEAD object with conditional and range support, object ACL read,
 * server-side copy, and single-object PUT/DELETE.
 *
 * Responses follow the S3 2006-03-01 XML wire format; errors are raised via
 * `S3Errors` and handled by the app-level error middleware.
 */
export function ObjectController(app: Hono<HonoEnv>) {
  // POST /:bucket — bucket-scoped POST; currently only DeleteObjects (?delete).
  app.post('/:bucket', async (ctx) => {
    const bucket = ctx.req.param('bucket');
    const url = new URL(ctx.req.url);
    const s3Service = ctx.get('s3Service');
    if (url.searchParams.has('delete')) {
      // DeleteObjects: body is XML listing <Key> elements to remove.
      const bodyText = await ctx.req.text();
      const keys = parseDeleteObjectsXml(bodyText);
      const result = await s3Service.deleteObjects(bucket, keys);
      let xml = xmlHeader();
      xml += '<DeleteResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">\n';
      for (const key of result.deleted) {
        xml += ' <Deleted>\n';
        xml += ` ${xmlTag('Key', key)}\n`;
        xml += ' </Deleted>\n';
      }
      // Per-key failures are reported inline rather than failing the request.
      for (const error of result.errors) {
        xml += ' <Error>\n';
        xml += ` ${xmlTag('Key', error.key)}\n`;
        xml += ` ${xmlTag('Code', error.code)}\n`;
        xml += ` ${xmlTag('Message', error.message)}\n`;
        xml += ' </Error>\n';
      }
      xml += '</DeleteResult>';
      return ctx.body(xml, 200, {
        'Content-Type': 'application/xml',
      });
    }
    throw S3Errors.invalidArgument('Invalid POST request');
  });
  // POST /:bucket/:key — initiate (?uploads) or complete (?uploadId) a multipart upload.
  app.post('/:bucket/:key{.+}', async (ctx) => {
    const bucket = ctx.req.param('bucket');
    const key = ctx.req.param('key');
    const url = new URL(ctx.req.url);
    const s3Service = ctx.get('s3Service');
    if (url.searchParams.has('uploads')) {
      // CreateMultipartUpload: capture content type + x-amz-meta-* headers up front.
      const contentType = ctx.req.header('content-type') ?? 'application/octet-stream';
      const metadata = extractUserMetadata(ctx.req.raw.headers);
      const result = await s3Service.createMultipartUpload(bucket, key, {
        contentType,
        metadata,
      });
      let xml = xmlHeader();
      xml += '<InitiateMultipartUploadResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">\n';
      xml += ` ${xmlTag('Bucket', bucket)}\n`;
      xml += ` ${xmlTag('Key', key)}\n`;
      xml += ` ${xmlTag('UploadId', result.uploadId)}\n`;
      xml += '</InitiateMultipartUploadResult>';
      return ctx.body(xml, 200, {
        'Content-Type': 'application/xml',
      });
    }
    const uploadId = url.searchParams.get('uploadId');
    if (uploadId) {
      // CompleteMultipartUpload: body lists <Part> entries (number + ETag).
      const bodyText = await ctx.req.text();
      const parts = parseCompleteMultipartUploadXml(bodyText);
      const result = await s3Service.completeMultipartUpload(bucket, key, uploadId, parts);
      let xml = xmlHeader();
      xml += '<CompleteMultipartUploadResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">\n';
      xml += ` ${xmlTag('Location', result.location)}\n`;
      xml += ` ${xmlTag('Bucket', bucket)}\n`;
      xml += ` ${xmlTag('Key', key)}\n`;
      xml += ` ${xmlTag('ETag', result.etag)}\n`;
      xml += '</CompleteMultipartUploadResult>';
      return ctx.body(xml, 200, {
        'Content-Type': 'application/xml',
      });
    }
    throw S3Errors.invalidArgument('Invalid POST request');
  });
  // GET /:bucket/:key — HEAD object, ListParts (?uploadId), GetObjectAcl (?acl),
  // or GetObject with conditional/range support.
  app.get('/:bucket/:key{.+}', async (ctx) => {
    const bucket = ctx.req.param('bucket');
    const key = ctx.req.param('key');
    const url = new URL(ctx.req.url);
    const s3Service = ctx.get('s3Service');
    // NOTE(review): HEAD requests appear to be dispatched through this GET
    // handler, hence the explicit method check — confirm against router setup.
    if (ctx.req.method === 'HEAD') {
      const metadata = await s3Service.headObject(bucket, key);
      const headers: Record<string, string> = {
        'Content-Type': metadata.contentType ?? 'application/octet-stream',
        'Content-Length': String(metadata.size),
        ETag: metadata.etag,
        'Last-Modified': new Date(metadata.lastModified).toUTCString(),
        'Accept-Ranges': 'bytes',
      };
      // Echo stored user metadata back as x-amz-meta-* response headers.
      for (const metaKey of Object.keys(metadata.metadata)) {
        headers[`x-amz-meta-${metaKey}`] = metadata.metadata[metaKey]!;
      }
      return ctx.body(null, 200, headers);
    }
    const uploadId = url.searchParams.get('uploadId');
    if (uploadId) {
      // ListParts for an in-progress multipart upload; pagination is optional.
      const maxParts = url.searchParams.get('max-parts');
      const partNumberMarker = url.searchParams.get('part-number-marker');
      const result = await s3Service.listParts(bucket, key, uploadId, {
        ...(maxParts !== null && {maxParts: parseInt(maxParts, 10)}),
        ...(partNumberMarker !== null && {partNumberMarker: parseInt(partNumberMarker, 10)}),
      });
      let xml = xmlHeader();
      xml += '<ListPartsResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">\n';
      xml += ` ${xmlTag('Bucket', bucket)}\n`;
      xml += ` ${xmlTag('Key', key)}\n`;
      xml += ` ${xmlTag('UploadId', uploadId)}\n`;
      // Initiator/Owner are both reported as the authenticated key (or anonymous).
      xml += ' <Initiator>\n';
      xml += ` ${xmlTag('ID', ctx.get('accessKeyId') ?? 'anonymous')}\n`;
      xml += ` ${xmlTag('DisplayName', ctx.get('accessKeyId') ?? 'anonymous')}\n`;
      xml += ' </Initiator>\n';
      xml += ' <Owner>\n';
      xml += ` ${xmlTag('ID', ctx.get('accessKeyId') ?? 'anonymous')}\n`;
      xml += ` ${xmlTag('DisplayName', ctx.get('accessKeyId') ?? 'anonymous')}\n`;
      xml += ' </Owner>\n';
      xml += ' <StorageClass>STANDARD</StorageClass>\n';
      xml += ` ${xmlTag('IsTruncated', result.isTruncated)}\n`;
      if (result.nextPartNumberMarker) {
        xml += ` ${xmlTag('NextPartNumberMarker', result.nextPartNumberMarker)}\n`;
      }
      for (const part of result.parts) {
        xml += ' <Part>\n';
        xml += ` ${xmlTag('PartNumber', part.partNumber)}\n`;
        xml += ` ${xmlTag('LastModified', formatISODate(part.lastModified))}\n`;
        xml += ` ${xmlTag('ETag', part.etag)}\n`;
        xml += ` ${xmlTag('Size', part.size)}\n`;
        xml += ' </Part>\n';
      }
      xml += '</ListPartsResult>';
      return ctx.body(xml, 200, {
        'Content-Type': 'application/xml',
      });
    }
    if (url.searchParams.has('acl')) {
      // GetObjectAcl: headObject is called only to 404 on missing objects;
      // a static FULL_CONTROL grant for the caller is always returned.
      await s3Service.headObject(bucket, key);
      let xml = xmlHeader();
      xml += '<AccessControlPolicy xmlns="http://s3.amazonaws.com/doc/2006-03-01/">\n';
      xml += ' <Owner>\n';
      xml += ` ${xmlTag('ID', ctx.get('accessKeyId') ?? 'anonymous')}\n`;
      xml += ` ${xmlTag('DisplayName', ctx.get('accessKeyId') ?? 'anonymous')}\n`;
      xml += ' </Owner>\n';
      xml += ' <AccessControlList>\n';
      xml += ' <Grant>\n';
      xml += ' <Grantee xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="CanonicalUser">\n';
      xml += ` ${xmlTag('ID', ctx.get('accessKeyId') ?? 'anonymous')}\n`;
      xml += ` ${xmlTag('DisplayName', ctx.get('accessKeyId') ?? 'anonymous')}\n`;
      xml += ' </Grantee>\n';
      xml += ' <Permission>FULL_CONTROL</Permission>\n';
      xml += ' </Grant>\n';
      xml += ' </AccessControlList>\n';
      xml += '</AccessControlPolicy>';
      return ctx.body(xml, 200, {
        'Content-Type': 'application/xml',
      });
    }
    // GetObject: parse a single "bytes=start-end" range (open-ended allowed).
    const rangeHeader = ctx.req.header('range');
    let range: {start: number; end: number} | undefined;
    if (rangeHeader) {
      const match = rangeHeader.match(/^bytes=(\d+)-(\d*)$/);
      if (match) {
        const start = parseInt(match[1]!, 10);
        const end = match[2] ? parseInt(match[2], 10) : Infinity;
        range = {start, end};
      }
    }
    const ifMatch = ctx.req.header('if-match');
    const ifNoneMatch = ctx.req.header('if-none-match');
    const ifModifiedSince = ctx.req.header('if-modified-since');
    const ifUnmodifiedSince = ctx.req.header('if-unmodified-since');
    // Conditional headers are evaluated against the fetched object's metadata.
    const result = await s3Service.getObject(bucket, key, range !== undefined ? {range} : undefined);
    if (ifMatch && result.metadata.etag !== ifMatch) {
      throw S3Errors.preconditionFailed('If-Match');
    }
    if (ifNoneMatch && result.metadata.etag === ifNoneMatch) {
      return ctx.body(null, 304);
    }
    if (ifModifiedSince) {
      const date = new Date(ifModifiedSince);
      if (new Date(result.metadata.lastModified) <= date) {
        return ctx.body(null, 304);
      }
    }
    if (ifUnmodifiedSince) {
      const date = new Date(ifUnmodifiedSince);
      if (new Date(result.metadata.lastModified) > date) {
        throw S3Errors.preconditionFailed('If-Unmodified-Since');
      }
    }
    const headers: Record<string, string> = {
      'Content-Type': result.metadata.contentType ?? 'application/octet-stream',
      ETag: result.metadata.etag,
      'Last-Modified': new Date(result.metadata.lastModified).toUTCString(),
      'Accept-Ranges': 'bytes',
    };
    for (const [metaKey, metaValue] of Object.entries(result.metadata.metadata)) {
      headers[`x-amz-meta-${metaKey}`] = metaValue;
    }
    if (result.contentRange) {
      // Derive Content-Length from "bytes start-end/total": end - start + 1.
      headers['Content-Range'] = result.contentRange;
      headers['Content-Length'] = String(
        parseInt(result.contentRange.split('/')[0]!.split('-')[1]!, 10) -
        parseInt(result.contentRange.split('/')[0]!.split('-')[0]!.split(' ')[1]!, 10) +
        1,
      );
      ctx.status(206);
    } else {
      headers['Content-Length'] = String(result.metadata.size);
    }
    for (const [headerKey, headerValue] of Object.entries(headers)) {
      ctx.header(headerKey, headerValue);
    }
    // Stream the object body to the client chunk by chunk.
    return stream(ctx, async (streamWriter) => {
      for await (const chunk of result.stream) {
        await streamWriter.write(chunk);
      }
    });
  });
  // PUT /:bucket/:key — UploadPart (?uploadId&partNumber), CopyObject
  // (x-amz-copy-source header), or plain PutObject.
  app.put('/:bucket/:key{.+}', async (ctx) => {
    const bucket = ctx.req.param('bucket');
    const key = ctx.req.param('key');
    const url = new URL(ctx.req.url);
    const s3Service = ctx.get('s3Service');
    const uploadId = url.searchParams.get('uploadId');
    const partNumber = url.searchParams.get('partNumber');
    if (uploadId && partNumber) {
      // UploadPart: the entire part is buffered in memory before handing off.
      const body = ctx.req.raw.body;
      if (!body) {
        throw S3Errors.missingContentLength();
      }
      const buffer = await ctx.req.arrayBuffer();
      const result = await s3Service.uploadPart(bucket, key, uploadId, parseInt(partNumber, 10), Buffer.from(buffer));
      return ctx.body(null, 200, {
        ETag: result.etag,
      });
    }
    const copySource = ctx.req.header('x-amz-copy-source');
    if (copySource) {
      // CopyObject: source is "/bucket/key" (possibly without leading slash).
      const sourcePath = copySource.startsWith('/') ? copySource.slice(1) : copySource;
      const [sourceBucket, ...sourceKeyParts] = sourcePath.split('/');
      const sourceKey = sourceKeyParts.join('/');
      if (!sourceBucket || !sourceKey) {
        throw S3Errors.invalidArgument('Invalid x-amz-copy-source');
      }
      // REPLACE swaps in the request's metadata/content-type; COPY keeps the source's.
      const metadataDirective = ctx.req.header('x-amz-metadata-directive') as 'COPY' | 'REPLACE' | undefined;
      const contentType = ctx.req.header('content-type');
      const metadata = extractUserMetadata(ctx.req.raw.headers);
      const copyMetadata = metadataDirective === 'REPLACE' ? metadata : undefined;
      const copyContentType = metadataDirective === 'REPLACE' ? contentType : undefined;
      const result = await s3Service.copyObject(
        decodeURIComponent(sourceBucket),
        decodeURIComponent(sourceKey),
        bucket,
        key,
        {
          metadataDirective: metadataDirective ?? 'COPY',
          ...(copyMetadata !== undefined && {metadata: copyMetadata}),
          ...(copyContentType !== undefined && {contentType: copyContentType}),
        },
      );
      let xml = xmlHeader();
      xml += '<CopyObjectResult>\n';
      xml += ` ${xmlTag('ETag', result.etag)}\n`;
      xml += ` ${xmlTag('LastModified', formatISODate(result.lastModified))}\n`;
      xml += '</CopyObjectResult>';
      return ctx.body(xml, 200, {
        'Content-Type': 'application/xml',
      });
    }
    // PutObject: buffer the full body; optional Content-MD5 is forwarded for
    // integrity verification by the service layer.
    const contentType = ctx.req.header('content-type') ?? 'application/octet-stream';
    const contentMd5 = ctx.req.header('content-md5');
    const metadata = extractUserMetadata(ctx.req.raw.headers);
    const buffer = await ctx.req.arrayBuffer();
    const result = await s3Service.putObject(bucket, key, Buffer.from(buffer), {
      contentType,
      ...(contentMd5 !== undefined && {contentMd5}),
      metadata,
    });
    return ctx.body(null, 200, {
      ETag: result.etag,
    });
  });
  // DELETE /:bucket/:key — AbortMultipartUpload (?uploadId) or DeleteObject.
  app.delete('/:bucket/:key{.+}', async (ctx) => {
    const bucket = ctx.req.param('bucket');
    const key = ctx.req.param('key');
    const url = new URL(ctx.req.url);
    const s3Service = ctx.get('s3Service');
    const uploadId = url.searchParams.get('uploadId');
    if (uploadId) {
      await s3Service.abortMultipartUpload(bucket, key, uploadId);
      return ctx.body(null, 204);
    }
    await s3Service.deleteObject(bucket, key);
    return ctx.body(null, 204);
  });
}
/**
 * Collects user-defined object metadata from request headers.
 *
 * Every header whose (case-insensitive) name starts with `x-amz-meta-` is
 * returned with the prefix stripped; all other headers are ignored.
 */
function extractUserMetadata(headers: Headers): Record<string, string> {
  const prefix = 'x-amz-meta-';
  const metadata: Record<string, string> = {};
  for (const [rawName, value] of headers.entries()) {
    const name = rawName.toLowerCase();
    if (name.startsWith(prefix)) {
      metadata[name.slice(prefix.length)] = value;
    }
  }
  return metadata;
}
/**
 * Extracts every `<Key>` value from a DeleteObjects request body.
 *
 * Keys are XML-entity-decoded before being returned, in document order.
 * Empty `<Key></Key>` elements are not matched by the pattern.
 */
function parseDeleteObjectsXml(xml: string): Array<string> {
  const keys: Array<string> = [];
  for (const match of xml.matchAll(/<Key>([^<]+)<\/Key>/g)) {
    keys.push(decodeXmlEntities(match[1]!));
  }
  return keys;
}
/**
 * Parses the `<Part>` entries of a CompleteMultipartUpload request body.
 *
 * Each part must contain `<PartNumber>` followed by `<ETag>`; ETags are
 * XML-entity-decoded. Parts are returned in document order.
 */
function parseCompleteMultipartUploadXml(xml: string): Array<{partNumber: number; etag: string}> {
  const partPattern = /<Part>\s*<PartNumber>(\d+)<\/PartNumber>\s*<ETag>([^<]+)<\/ETag>\s*<\/Part>/g;
  const parts: Array<{partNumber: number; etag: string}> = [];
  for (const match of xml.matchAll(partPattern)) {
    const [, rawNumber, rawEtag] = match;
    parts.push({
      partNumber: parseInt(rawNumber!, 10),
      etag: decodeXmlEntities(rawEtag!),
    });
  }
  return parts;
}
/**
 * Decodes the five predefined XML entities back to literal characters.
 *
 * `&amp;` is decoded LAST. Decoding it first (as the previous version did)
 * double-decodes: the literal text `&amp;lt;` — which encodes the string
 * "&lt;" — would first become "&lt;" and then wrongly collapse to "<".
 */
function decodeXmlEntities(str: string): string {
  return str
    .replace(/&lt;/g, '<')
    .replace(/&gt;/g, '>')
    .replace(/&quot;/g, '"')
    .replace(/&apos;/g, "'")
    .replace(/&amp;/g, '&');
}

View File

@@ -0,0 +1,82 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {hmacSha256, sha256} from '@fluxer/s3/src/utils/Crypto';
/** Inputs for building an AWS Signature V4 presigned URL. */
export interface PresignedUrlOptions {
  /** HTTP method the URL authorizes. */
  method: 'GET' | 'PUT' | 'DELETE';
  /** Target bucket name. */
  bucket: string;
  /** Object key (may contain slashes). */
  key: string;
  /** URL validity window in seconds (X-Amz-Expires). */
  expiresIn: number;
  /** Access key id embedded in the X-Amz-Credential scope. */
  accessKey: string;
  /** Secret key used to derive the SigV4 signing key; never appears in the URL. */
  secretKey: string;
  /** Service endpoint, e.g. "https://s3.example.com" (path prefix allowed). */
  endpoint: string;
  /** Signing region; defaults to "us-east-1" when omitted. */
  region?: string;
}
/**
 * Builds an AWS Signature Version 4 presigned URL for the given object.
 *
 * The canonical request uses the `host` header only and an UNSIGNED-PAYLOAD
 * body hash, matching standard presigned-URL semantics. Query parameters are
 * emitted in the sorted order SigV4 requires.
 *
 * Fix over the previous version: the bucket and each key path segment are now
 * RFC 3986 percent-encoded in the canonical URI (and therefore in the returned
 * URL). An unencoded key containing spaces or reserved characters previously
 * produced a signature the server could never verify.
 *
 * @param options Credentials, endpoint, object coordinates and expiry.
 * @returns Fully-qualified URL including the X-Amz-Signature parameter.
 */
export function generatePresignedUrl(options: PresignedUrlOptions): string {
  const {method, bucket, key, expiresIn, accessKey, secretKey, endpoint, region = 'us-east-1'} = options;
  const endpointUrl = new URL(endpoint);
  const host = endpointUrl.host;
  const basePath = endpointUrl.pathname === '/' ? '' : endpointUrl.pathname;
  const service = 's3';
  const algorithm = 'AWS4-HMAC-SHA256';
  const now = new Date();
  // ISO-8601 basic format timestamp, e.g. 20260217T122236Z
  const amzDate = now.toISOString().replace(/[:-]|\.\d+/g, '');
  const dateStamp = amzDate.slice(0, 8);
  // RFC 3986 encoding per path segment (slashes preserved); encodeURIComponent
  // leaves !'()* unescaped, so they are escaped explicitly.
  const encodeSegment = (segment: string): string =>
    encodeURIComponent(segment).replace(/[!'()*]/g, (ch) => `%${ch.charCodeAt(0).toString(16).toUpperCase()}`);
  const encodedKey = key.split('/').map(encodeSegment).join('/');
  const canonicalUri = `${basePath}/${encodeSegment(bucket)}/${encodedKey}`;
  // Parameters must be sorted by name in the canonical query string.
  const canonicalQuery = [
    `X-Amz-Algorithm=${algorithm}`,
    `X-Amz-Credential=${encodeURIComponent(`${accessKey}/${dateStamp}/${region}/${service}/aws4_request`)}`,
    `X-Amz-Date=${amzDate}`,
    `X-Amz-Expires=${expiresIn}`,
    `X-Amz-SignedHeaders=host`,
  ].join('&');
  const canonicalHeaders = `host:${host}\n`;
  const signedHeadersString = 'host';
  // Presigned URLs do not commit to a body hash.
  const payloadHash = 'UNSIGNED-PAYLOAD';
  const canonicalRequest = [
    method,
    canonicalUri,
    canonicalQuery,
    canonicalHeaders,
    signedHeadersString,
    payloadHash,
  ].join('\n');
  const credentialScope = `${dateStamp}/${region}/${service}/aws4_request`;
  const stringToSign = [algorithm, amzDate, credentialScope, sha256(canonicalRequest)].join('\n');
  // SigV4 signing-key derivation: HMAC chain over date, region, service, terminator.
  const kDate = hmacSha256(`AWS4${secretKey}`, dateStamp);
  const kRegion = hmacSha256(kDate, region);
  const kService = hmacSha256(kRegion, service);
  const kSigning = hmacSha256(kService, 'aws4_request');
  const signature = hmacSha256(kSigning, stringToSign).toString('hex');
  const baseUrl = `${endpointUrl.protocol}//${endpointUrl.host}`;
  return `${baseUrl}${canonicalUri}?${canonicalQuery}&X-Amz-Signature=${signature}`;
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,30 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import type {IS3Service} from '@fluxer/s3/src/s3/S3Service';
/** Hono environment: per-request context variables set by middleware. */
export interface HonoEnv {
  Variables: {
    /** Service layer that route handlers delegate all S3 operations to. */
    s3Service: IS3Service;
    /** Access key of the authenticated caller; absent for anonymous requests. */
    accessKeyId?: string;
    /** Whether the request passed authentication. */
    authenticated: boolean;
    /** Unique id assigned to this request (used in responses/logging). */
    requestId: string;
    /** Bucket name resolved from a virtual-hosted-style request, if any. */
    virtualBucket?: string;
  };
}

View File

@@ -0,0 +1,46 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {createHash, createHmac} from 'node:crypto';
/** Computes an HMAC-SHA256 of UTF-8 `data` under `key`, returned as raw bytes. */
export function hmacSha256(key: string | Buffer, data: string): Buffer {
  const mac = createHmac('sha256', key);
  mac.update(data, 'utf8');
  return mac.digest();
}
/** Returns the SHA-256 digest of `data` as a lowercase hex string. */
export function sha256(data: string | Buffer): string {
  const hasher = createHash('sha256');
  hasher.update(data);
  return hasher.digest('hex');
}
/** Returns the MD5 digest of `data` as a lowercase hex string (non-cryptographic use, e.g. S3 ETags). */
export function md5(data: string | Buffer): string {
  const hasher = createHash('md5');
  hasher.update(data);
  return hasher.digest('hex');
}
/** Returns the MD5 digest of `data` as base64 (the Content-MD5 header format). */
export function md5Base64(data: string | Buffer): string {
  const hasher = createHash('md5');
  hasher.update(data);
  return hasher.digest('base64');
}
/** Generates `bytes` cryptographically random bytes and returns them as lowercase hex (2 chars per byte). */
export function randomHex(bytes: number): string {
  const buf = new Uint8Array(bytes);
  crypto.getRandomValues(buf);
  let out = '';
  for (const byte of buf) {
    out += byte.toString(16).padStart(2, '0');
  }
  return out;
}
/** Generates an RFC 4122 version-4 UUID using the Web Crypto API. */
export function randomUUID(): string {
  const uuid = crypto.randomUUID();
  return uuid;
}

View File

@@ -0,0 +1,68 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
/** Escapes the five XML-special characters (& < > " ') with their predefined entities. */
export function escapeXml(str: string): string {
  const entities: Record<string, string> = {
    '&': '&amp;',
    '<': '&lt;',
    '>': '&gt;',
    '"': '&quot;',
    "'": '&apos;',
  };
  // Single pass: each special character maps directly to its entity, so
  // already-substituted '&' characters are never re-escaped.
  return str.replace(/[&<>"']/g, (ch) => entities[ch] ?? ch);
}
/**
 * Renders `<name>value</name>`, escaping the value by default.
 *
 * Returns the empty string for null/undefined values so callers can emit
 * optional elements unconditionally. Pass `shouldEscape = false` for content
 * that is already XML-safe.
 */
export function xmlTag(name: string, value: string | number | boolean | undefined | null, shouldEscape = true): string {
  if (value == null) {
    return '';
  }
  const text = String(value);
  const content = shouldEscape ? escapeXml(text) : text;
  return `<${name}>${content}</${name}>`;
}
/** Returns the standard XML declaration line, newline-terminated. */
export function xmlHeader(): string {
  const declaration = '<?xml version="1.0" encoding="UTF-8"?>';
  return `${declaration}\n`;
}
/** Formats a date as full ISO-8601 UTC, e.g. "2026-02-17T12:22:36.000Z". */
export function formatISODate(date: Date): string {
  const iso = date.toISOString();
  return iso;
}
/** Formats a date in ISO-8601 basic format used by SigV4, e.g. "20240102T030405Z". */
export function formatAmzDate(date: Date): string {
  const compact = date.toISOString().replace(/[-:]/g, '');
  // Drop the milliseconds component (".SSS") left over from toISOString().
  return compact.replace(/\.\d{3}/, '');
}
/**
 * Parses an ISO-8601 basic-format timestamp ("YYYYMMDDTHHMMSSZ") into a UTC Date.
 *
 * @returns The parsed Date, or null when the string does not match the format.
 */
export function parseAmzDate(dateStr: string): Date | null {
  const pattern = /^(\d{4})(\d{2})(\d{2})T(\d{2})(\d{2})(\d{2})Z$/;
  const parts = pattern.exec(dateStr);
  if (parts === null) {
    return null;
  }
  const [year, month, day, hour, minute, second] = parts.slice(1).map((p) => parseInt(p, 10));
  // Date.UTC takes a zero-based month.
  return new Date(Date.UTC(year!, month! - 1, day!, hour!, minute!, second!));
}