fix: various fixes to sentry-reported errors and more

This commit is contained in:
Hampus Kraft
2026-02-18 15:38:51 +00:00
parent 302c0d2a0c
commit 0517a966a3
357 changed files with 25420 additions and 16281 deletions

View File

@@ -166,7 +166,7 @@ export function createAdminApp(options: CreateAdminAppOptions): AdminAppResult {
}
});
app.onError(createAdminErrorHandler(logger, config.env === 'development'));
app.onError(createAdminErrorHandler(logger, config.env === 'development', config.basePath));
app.get('/_health', (c) => c.json({status: 'ok'}));

View File

@@ -33,6 +33,7 @@ import type {
LookupUserResponse,
UserAdminResponse,
} from '@fluxer/schema/src/domains/admin/AdminUserSchemas';
import type {WebAuthnCredentialListResponse} from '@fluxer/schema/src/domains/auth/AuthSchemas';
export async function getCurrentAdmin(config: Config, session: Session): Promise<ApiResult<UserAdminResponse | null>> {
const client = new ApiClient(config, session);
@@ -417,3 +418,29 @@ export async function sendPasswordReset(
const client = new ApiClient(config, session);
return client.postVoid('/admin/users/send-password-reset', {user_id: userId}, auditLogReason);
}
export async function listWebAuthnCredentials(
config: Config,
session: Session,
userId: string,
): Promise<ApiResult<WebAuthnCredentialListResponse>> {
const client = new ApiClient(config, session);
return client.post<WebAuthnCredentialListResponse>('/admin/users/list-webauthn-credentials', {
user_id: userId,
});
}
export async function deleteWebAuthnCredential(
config: Config,
session: Session,
userId: string,
credentialId: string,
auditLogReason?: string,
): Promise<ApiResult<void>> {
const client = new ApiClient(config, session);
return client.postVoid(
'/admin/users/delete-webauthn-credential',
{user_id: userId, credential_id: credentialId},
auditLogReason,
);
}

View File

@@ -32,7 +32,13 @@ import type {Context, ErrorHandler} from 'hono';
const KNOWN_HTTP_STATUS_CODES: Array<HttpStatusCode> = Object.values(HttpStatus);
export function createAdminErrorHandler(logger: LoggerInterface, includeStack: boolean): ErrorHandler {
export function createAdminErrorHandler(
logger: LoggerInterface,
includeStack: boolean,
basePath: string,
): ErrorHandler {
const homeUrl = basePath || '/';
return createErrorHandler({
includeStack,
logError: (error, c) => {
@@ -55,9 +61,9 @@ export function createAdminErrorHandler(logger: LoggerInterface, includeStack: b
customHandler: (error, c) => {
const status = getStatus(error) ?? 500;
if (status === 404) {
return renderNotFound(c);
return renderNotFound(c, homeUrl);
}
return renderError(c, status);
return renderError(c, status, homeUrl);
},
});
}
@@ -67,7 +73,7 @@ function getStatus(error: Error): number | null {
return typeof statusValue === 'number' ? statusValue : null;
}
function renderNotFound(c: Context): Response | Promise<Response> {
function renderNotFound(c: Context, homeUrl: string): Response | Promise<Response> {
c.status(404);
return c.html(
<ErrorPage
@@ -75,13 +81,13 @@ function renderNotFound(c: Context): Response | Promise<Response> {
title="Page not found"
description="The page you are looking for does not exist or has been moved."
staticCdnEndpoint={CdnEndpoints.STATIC}
homeUrl="/admin"
homeUrl={homeUrl}
homeLabel="Go to admin"
/>,
);
}
function renderError(c: Context, status: number): Response | Promise<Response> {
function renderError(c: Context, status: number, homeUrl: string): Response | Promise<Response> {
const statusCode = isHttpStatusCode(status) ? status : HttpStatus.INTERNAL_SERVER_ERROR;
c.status(statusCode);
return c.html(
@@ -90,7 +96,7 @@ function renderError(c: Context, status: number): Response | Promise<Response> {
title="Something went wrong"
description="An unexpected error occurred. Please try again later."
staticCdnEndpoint={CdnEndpoints.STATIC}
homeUrl="/admin"
homeUrl={homeUrl}
homeLabel="Go to admin"
/>,
);

View File

@@ -105,6 +105,14 @@ const ReindexControls: FC<{config: Config; csrfToken: string}> = ({config, csrfT
<ReindexButton config={config} title="Guilds" indexType="guilds" csrfToken={csrfToken} />
<ReindexButton config={config} title="Reports" indexType="reports" csrfToken={csrfToken} />
<ReindexButton config={config} title="Audit Logs" indexType="audit_logs" csrfToken={csrfToken} />
<Heading level={3} class="subtitle mt-6 text-neutral-900">
Discovery Index
</Heading>
<Text color="muted" size="sm" class="mb-3">
Rebuilds the discovery search index for all approved discoverable communities. This syncs guild metadata,
descriptions, categories, and online counts.
</Text>
<ReindexButton config={config} title="Discovery Index" indexType="discovery" csrfToken={csrfToken} />
<Heading level={3} class="subtitle mt-6 text-neutral-900">
Guild-specific Search Indexes
</Heading>

View File

@@ -45,6 +45,7 @@ import type {
ListUserSessionsResponse,
UserAdminResponse,
} from '@fluxer/schema/src/domains/admin/AdminUserSchemas';
import type {WebAuthnCredentialListResponse} from '@fluxer/schema/src/domains/auth/AuthSchemas';
import {BackButton, NotFoundView} from '@fluxer/ui/src/components/Navigation';
import {formatDiscriminator, getUserAvatarUrl, getUserBannerUrl} from '@fluxer/ui/src/utils/FormatUser';
import type {FC} from 'hono/jsx';
@@ -170,6 +171,7 @@ export const UserDetailPage: FC<UserDetailPageProps> = async ({
let sessionsResult: {ok: true; data: ListUserSessionsResponse} | {ok: false; error: ApiError} | null = null;
let guildsResult: {ok: true; data: ListUserGuildsResponse} | {ok: false; error: ApiError} | null = null;
let dmChannelsResult: {ok: true; data: ListUserDmChannelsResponse} | {ok: false; error: ApiError} | null = null;
let webAuthnCredentials: WebAuthnCredentialListResponse | null = null;
let messageShredStatusResult:
| {ok: true; data: messagesApi.MessageShredStatusResponse}
| {ok: false; error: ApiError}
@@ -182,6 +184,13 @@ export const UserDetailPage: FC<UserDetailPageProps> = async ({
if (activeTab === 'account') {
sessionsResult = await usersApi.listUserSessions(config, session, userId);
const hasWebAuthn = user.authenticator_types.includes(2);
if (hasWebAuthn) {
const credResult = await usersApi.listWebAuthnCredentials(config, session, userId);
if (credResult.ok) {
webAuthnCredentials = credResult.data;
}
}
}
if (activeTab === 'guilds') {
@@ -304,6 +313,7 @@ export const UserDetailPage: FC<UserDetailPageProps> = async ({
user={user}
userId={userId}
sessionsResult={sessionsResult}
webAuthnCredentials={webAuthnCredentials}
csrfToken={csrfToken}
/>
)}

View File

@@ -32,6 +32,10 @@ import type {
UserAdminResponse,
UserSessionResponse,
} from '@fluxer/schema/src/domains/admin/AdminUserSchemas';
import type {
WebAuthnCredentialListResponse,
WebAuthnCredentialResponse,
} from '@fluxer/schema/src/domains/auth/AuthSchemas';
import {Button} from '@fluxer/ui/src/components/Button';
import {Card} from '@fluxer/ui/src/components/Card';
import {CsrfInput} from '@fluxer/ui/src/components/CsrfInput';
@@ -43,10 +47,18 @@ interface AccountTabProps {
user: UserAdminResponse;
userId: string;
sessionsResult: {ok: true; data: ListUserSessionsResponse} | {ok: false; error: ApiError} | null;
webAuthnCredentials: WebAuthnCredentialListResponse | null;
csrfToken: string;
}
export function AccountTab({config: _config, user, userId: _userId, sessionsResult, csrfToken}: AccountTabProps) {
export function AccountTab({
config: _config,
user,
userId: _userId,
sessionsResult,
webAuthnCredentials,
csrfToken,
}: AccountTabProps) {
return (
<VStack gap={6}>
<Card padding="md">
@@ -266,10 +278,82 @@ export function AccountTab({config: _config, user, userId: _userId, sessionsResu
</div>
</VStack>
</Card>
{webAuthnCredentials && webAuthnCredentials.length > 0 && (
<Card padding="md">
<VStack gap={4}>
<Heading level={2} size="base">
WebAuthn Credentials
</Heading>
<div class="overflow-x-auto">
<table class="w-full text-sm">
<thead>
<tr class="border-neutral-200 border-b text-left">
<th class="pb-2 font-medium text-neutral-600">Name</th>
<th class="pb-2 font-medium text-neutral-600">Created</th>
<th class="pb-2 font-medium text-neutral-600">Last Used</th>
<th class="pb-2 font-medium text-neutral-600" />
</tr>
</thead>
<tbody>
{webAuthnCredentials.map((credential) => (
<WebAuthnCredentialRow credential={credential} csrfToken={csrfToken} />
))}
</tbody>
</table>
</div>
</VStack>
</Card>
)}
</VStack>
);
}
// Table row for one WebAuthn credential: name, created/last-used timestamps,
// and a CSRF-protected delete form with a client-side confirm prompt.
const WebAuthnCredentialRow: FC<{credential: WebAuthnCredentialResponse; csrfToken: string}> = ({
  credential,
  csrfToken,
}) => {
  // Renders an ISO-8601 timestamp as "YYYY-MM-DD HH:MM:SS"; returns the raw
  // value unchanged when it does not look like an ISO date-time.
  function formatTimestamp(value: string): string {
    const [datePart, timePartRaw] = value.split('T');
    if (!datePart || !timePartRaw) return value;
    const timePart = timePartRaw.replace('Z', '').split('.')[0] ?? timePartRaw;
    return `${datePart} ${timePart}`;
  }
  // Build the confirm() argument with JSON.stringify so a credential name
  // containing quotes, backslashes, or newlines cannot break out of the inline
  // JS string (the previous raw interpolation broke on names with a quote).
  const confirmScript = `return confirm(${JSON.stringify(
    `Are you sure you want to delete the WebAuthn credential "${credential.name}"?`,
  )})`;
  return (
    <tr class="border-neutral-100 border-b">
      <td class="py-2 pr-4">
        <Text size="sm" class="text-neutral-900">
          {credential.name}
        </Text>
      </td>
      <td class="py-2 pr-4">
        <Text size="sm" class="text-neutral-900">
          {formatTimestamp(credential.created_at)}
        </Text>
      </td>
      <td class="py-2 pr-4">
        <Text size="sm" class="text-neutral-900">
          {credential.last_used_at ? formatTimestamp(credential.last_used_at) : 'Never'}
        </Text>
      </td>
      <td class="py-2">
        <form method="post" action="?action=delete_webauthn_credential&tab=account" onsubmit={confirmScript}>
          <CsrfInput token={csrfToken} />
          <input type="hidden" name="credential_id" value={credential.id} />
          <Button type="submit" variant="primary" size="small">
            Delete
          </Button>
        </form>
      </td>
    </tr>
  );
};
const SessionCard: FC<{session: UserSessionResponse}> = ({session}) => {
function formatSessionTimestamp(value: string): string {
const [datePart, timePartRaw] = value.split('T');

View File

@@ -462,6 +462,23 @@ export function createUsersRoutes({config, assetVersion, requireAuth}: RouteFact
});
}
case 'delete_webauthn_credential': {
const credentialId = getRequiredString(formData, 'credential_id');
if (!credentialId) {
return redirectWithFlash(c, redirectUrl, {
message: 'Credential ID is required',
type: 'error',
});
}
const result = await usersApi.deleteWebAuthnCredential(config, session, userId, credentialId);
return redirectWithFlash(c, redirectUrl, {
message: result.ok ? 'WebAuthn credential deleted successfully' : 'Failed to delete WebAuthn credential',
type: result.ok ? 'success' : 'error',
});
}
case 'send_password_reset': {
const result = await usersApi.sendPasswordReset(config, session, userId);
return redirectWithFlash(c, redirectUrl, {

View File

@@ -23,7 +23,7 @@
"@fluxer/date_utils": "workspace:*",
"@fluxer/email": "workspace:*",
"@fluxer/errors": "workspace:*",
"@fluxer/geo_utils": "workspace:*",
"@fluxer/geoip": "workspace:*",
"@fluxer/hono": "workspace:*",
"@fluxer/http_client": "workspace:*",
"@fluxer/ip_utils": "workspace:*",
@@ -32,10 +32,10 @@
"@fluxer/locale": "workspace:*",
"@fluxer/logger": "workspace:*",
"@fluxer/meilisearch_search": "workspace:*",
"@fluxer/nats": "workspace:*",
"@fluxer/media_proxy": "workspace:*",
"@fluxer/media_proxy_utils": "workspace:*",
"@fluxer/mime_utils": "workspace:*",
"@fluxer/queue": "workspace:*",
"@fluxer/rate_limit": "workspace:*",
"@fluxer/s3": "workspace:*",
"@fluxer/schema": "workspace:*",
@@ -65,7 +65,7 @@
"lodash": "catalog:",
"luxon": "catalog:",
"magic-bytes.js": "catalog:",
"maxmind": "catalog:",
"nats": "catalog:",
"mime": "catalog:",
"sharp": "catalog:",
"stripe": "catalog:",

View File

@@ -142,10 +142,10 @@ export function buildAPIConfigFromMaster(master: MasterConfig): APIConfig {
url: master.internal.kv,
},
gateway: {
rpcEndpoint: master.gateway.rpc_endpoint,
rpcSecret: master.gateway.rpc_secret,
rpcTcpPort: master.services.gateway.rpc_tcp_port,
nats: {
coreUrl: master.services.nats?.core_url ?? 'nats://127.0.0.1:4222',
jetStreamUrl: master.services.nats?.jetstream_url ?? 'nats://127.0.0.1:4223',
authToken: master.services.nats?.auth_token ?? '',
},
mediaProxy: {
@@ -392,8 +392,8 @@ export function buildAPIConfigFromMaster(master: MasterConfig): APIConfig {
},
queue: {
baseUrl: master.internal.queue,
authSecret: master.services.queue?.secret,
baseUrl: 'queue' in master.internal ? String(master.internal.queue) : 'http://localhost:8088/queue',
authSecret: 'queue' in master.services ? (master.services.queue as {secret?: string}).secret : undefined,
},
};
}

View File

@@ -99,10 +99,12 @@ import type {
ChangeEmailRequest,
ChangeUsernameRequest,
ClearUserFieldsRequest,
DeleteWebAuthnCredentialRequest,
DisableForSuspiciousActivityRequest,
DisableMfaRequest,
ListUserChangeLogRequest,
ListUserDmChannelsRequest,
ListWebAuthnCredentialsRequest,
LookupUserRequest,
SendPasswordResetRequest,
SetUserAclsRequest,
@@ -572,6 +574,22 @@ export class AdminService {
return this.userService.listUserSessions(userId, adminUserId, auditLogReason);
}
/**
 * Lists a user's registered WebAuthn credentials.
 * Thin delegation to the user service, which performs the lookup and audit logging.
 */
async listWebAuthnCredentials(
data: ListWebAuthnCredentialsRequest,
adminUserId: UserID,
auditLogReason: string | null,
) {
return this.userService.listWebAuthnCredentials(data, adminUserId, auditLogReason);
}
/**
 * Deletes one WebAuthn credential from a user's account.
 * Thin delegation to the user service, which performs the deletion and audit logging.
 */
async deleteWebAuthnCredential(
data: DeleteWebAuthnCredentialRequest,
adminUserId: UserID,
auditLogReason: string | null,
) {
return this.userService.deleteWebAuthnCredential(data, adminUserId, auditLogReason);
}
/** Lists a user's DM channels; thin delegation to the user service. */
async listUserDmChannels(data: ListUserDmChannelsRequest) {
return this.userService.listUserDmChannels(data);
}
@@ -781,7 +799,8 @@ export class AdminService {
| 'audit_logs'
| 'channel_messages'
| 'guild_members'
| 'favorite_memes';
| 'favorite_memes'
| 'discovery';
guild_id?: bigint;
user_id?: bigint;
},

View File

@@ -43,8 +43,8 @@ function mapDiscoveryRowToResponse(row: GuildDiscoveryRow) {
description: row.description,
category_type: row.category_type,
applied_at: row.applied_at.toISOString(),
reviewed_at: row.reviewed_at?.toISOString() ?? null,
review_reason: row.review_reason ?? null,
reviewed_at: row.removed_at?.toISOString() ?? row.reviewed_at?.toISOString() ?? null,
review_reason: row.removal_reason ?? row.review_reason ?? null,
};
}

View File

@@ -34,6 +34,7 @@ import {
ChangeEmailRequest,
ChangeUsernameRequest,
ClearUserFieldsRequest,
DeleteWebAuthnCredentialRequest,
DisableForSuspiciousActivityRequest,
DisableMfaRequest,
ListUserChangeLogRequest,
@@ -42,6 +43,7 @@ import {
ListUserDmChannelsResponse,
ListUserSessionsRequest,
ListUserSessionsResponse,
ListWebAuthnCredentialsRequest,
LookupUserRequest,
LookupUserResponse,
ScheduleAccountDeletionRequest,
@@ -59,6 +61,7 @@ import {
UserMutationResponse,
VerifyUserEmailRequest,
} from '@fluxer/schema/src/domains/admin/AdminUserSchemas';
import {WebAuthnCredentialListResponse} from '@fluxer/schema/src/domains/auth/AuthSchemas';
export function UserAdminController(app: HonoApp) {
app.get(
@@ -170,6 +173,53 @@ export function UserAdminController(app: HonoApp) {
},
);
app.post(
'/admin/users/list-webauthn-credentials',
RateLimitMiddleware(RateLimitConfigs.ADMIN_USER_MODIFY),
requireAdminACL(AdminACLs.USER_UPDATE_MFA),
Validator('json', ListWebAuthnCredentialsRequest),
OpenAPI({
operationId: 'list_user_webauthn_credentials',
summary: 'List user WebAuthn credentials',
responseSchema: WebAuthnCredentialListResponse,
statusCode: 200,
security: 'adminApiKey',
tags: 'Admin',
description:
'List all WebAuthn credentials (passkeys/security keys) registered for a user. Returns credential names, creation dates, and last usage. Creates audit log entry. Requires USER_UPDATE_MFA permission.',
}),
async (ctx) => {
const adminService = ctx.get('adminService');
const adminUserId = ctx.get('adminUserId');
const auditLogReason = ctx.get('auditLogReason');
return ctx.json(await adminService.listWebAuthnCredentials(ctx.req.valid('json'), adminUserId, auditLogReason));
},
);
app.post(
'/admin/users/delete-webauthn-credential',
RateLimitMiddleware(RateLimitConfigs.ADMIN_USER_MODIFY),
requireAdminACL(AdminACLs.USER_UPDATE_MFA),
Validator('json', DeleteWebAuthnCredentialRequest),
OpenAPI({
operationId: 'delete_user_webauthn_credential',
summary: 'Delete user WebAuthn credential',
responseSchema: null,
statusCode: 204,
security: 'adminApiKey',
tags: 'Admin',
description:
'Delete a specific WebAuthn credential (passkey/security key) from a user account. Creates audit log entry. Requires USER_UPDATE_MFA permission.',
}),
async (ctx) => {
const adminService = ctx.get('adminService');
const adminUserId = ctx.get('adminUserId');
const auditLogReason = ctx.get('auditLogReason');
await adminService.deleteWebAuthnCredential(ctx.req.valid('json'), adminUserId, auditLogReason);
return ctx.body(null, 204);
},
);
app.post(
'/admin/users/clear-fields',
RateLimitMiddleware(RateLimitConfigs.ADMIN_USER_MODIFY),

View File

@@ -19,8 +19,9 @@
import dns from 'node:dns';
import type {User} from '@fluxer/api/src/models/User';
import {formatGeoipLocation, lookupGeoip} from '@fluxer/api/src/utils/IpUtils';
import {lookupGeoip} from '@fluxer/api/src/utils/IpUtils';
import type {ICacheService} from '@fluxer/cache/src/ICacheService';
import {formatGeoipLocation} from '@fluxer/geoip/src/GeoipLookup';
import type {UserAdminResponse} from '@fluxer/schema/src/domains/admin/AdminUserSchemas';
import {seconds} from 'itty-time';

View File

@@ -31,7 +31,15 @@ import type {IWorkerService} from '@fluxer/worker/src/contracts/IWorkerService';
import type {WorkerJobPayload} from '@fluxer/worker/src/contracts/WorkerTypes';
interface RefreshSearchIndexJobPayload extends WorkerJobPayload {
index_type: 'guilds' | 'users' | 'reports' | 'audit_logs' | 'channel_messages' | 'favorite_memes' | 'guild_members';
index_type:
| 'guilds'
| 'users'
| 'reports'
| 'audit_logs'
| 'channel_messages'
| 'favorite_memes'
| 'guild_members'
| 'discovery';
admin_user_id: string;
audit_log_reason: string | null;
job_id: string;
@@ -147,7 +155,8 @@ export class AdminSearchService {
| 'audit_logs'
| 'channel_messages'
| 'guild_members'
| 'favorite_memes';
| 'favorite_memes'
| 'discovery';
guild_id?: bigint;
user_id?: bigint;
},

View File

@@ -37,8 +37,10 @@ import {ServiceUnavailableError} from '@fluxer/errors/src/domains/core/ServiceUn
import {UnknownUserError} from '@fluxer/errors/src/domains/user/UnknownUserError';
import type {
BulkUpdateUserFlagsRequest,
DeleteWebAuthnCredentialRequest,
DisableForSuspiciousActivityRequest,
DisableMfaRequest,
ListWebAuthnCredentialsRequest,
SendPasswordResetRequest,
SetUserAclsRequest,
SetUserTraitsRequest,
@@ -46,6 +48,7 @@ import type {
UnlinkPhoneRequest,
UpdateSuspiciousActivityFlagsRequest,
} from '@fluxer/schema/src/domains/admin/AdminUserSchemas';
import type {WebAuthnCredentialListResponse} from '@fluxer/schema/src/domains/auth/AuthSchemas';
interface AdminUserSecurityServiceDeps {
userRepository: IUserRepository;
@@ -430,6 +433,66 @@ export class AdminUserSecurityService {
};
}
async listWebAuthnCredentials(
data: ListWebAuthnCredentialsRequest,
adminUserId: UserID,
auditLogReason: string | null,
): Promise<WebAuthnCredentialListResponse> {
const {userRepository, auditService} = this.deps;
const userId = createUserID(data.user_id);
const user = await userRepository.findUnique(userId);
if (!user) {
throw new UnknownUserError();
}
const credentials = await userRepository.listWebAuthnCredentials(userId);
await auditService.createAuditLog({
adminUserId,
targetType: 'user',
targetId: BigInt(userId),
action: 'list_webauthn_credentials',
auditLogReason,
metadata: new Map([['credential_count', credentials.length.toString()]]),
});
return credentials.map((cred) => ({
id: cred.credentialId,
name: cred.name,
created_at: cred.createdAt.toISOString(),
last_used_at: cred.lastUsedAt?.toISOString() ?? null,
}));
}
/**
 * Deletes one WebAuthn credential from the target user's account and writes
 * an audit log entry identifying the removed credential.
 *
 * NOTE(review): a missing credential is reported with UnknownUserError, the
 * same error as a missing user — consider a credential-specific error type.
 *
 * @param data - Request containing the user ID and credential ID.
 * @param adminUserId - Admin performing the deletion (for the audit trail).
 * @param auditLogReason - Optional reason recorded alongside the audit entry.
 * @throws UnknownUserError when the user or the credential is not found.
 */
async deleteWebAuthnCredential(
  data: DeleteWebAuthnCredentialRequest,
  adminUserId: UserID,
  auditLogReason: string | null,
) {
  const userId = createUserID(data.user_id);
  if ((await this.deps.userRepository.findUnique(userId)) == null) {
    throw new UnknownUserError();
  }
  const existing = await this.deps.userRepository.getWebAuthnCredential(userId, data.credential_id);
  if (existing == null) {
    throw new UnknownUserError();
  }
  await this.deps.userRepository.deleteWebAuthnCredential(userId, data.credential_id);
  await this.deps.auditService.createAuditLog({
    adminUserId,
    targetType: 'user',
    targetId: BigInt(userId),
    action: 'delete_webauthn_credential',
    auditLogReason,
    metadata: new Map([['credential_id', data.credential_id]]),
  });
}
async listUserSessions(userId: bigint, adminUserId: UserID, auditLogReason: string | null) {
const {userRepository, auditService, cacheService} = this.deps;
const userIdTyped = createUserID(userId);

View File

@@ -48,10 +48,12 @@ import type {
ChangeEmailRequest,
ChangeUsernameRequest,
ClearUserFieldsRequest,
DeleteWebAuthnCredentialRequest,
DisableForSuspiciousActivityRequest,
DisableMfaRequest,
ListUserChangeLogRequest,
ListUserDmChannelsRequest,
ListWebAuthnCredentialsRequest,
LookupUserRequest,
ScheduleAccountDeletionRequest,
SendPasswordResetRequest,
@@ -259,6 +261,22 @@ export class AdminUserService {
return this.securityService.listUserSessions(userId, adminUserId, auditLogReason);
}
/**
 * Lists a user's registered WebAuthn credentials.
 * Thin delegation to the security service, which validates the user and audits the access.
 */
async listWebAuthnCredentials(
data: ListWebAuthnCredentialsRequest,
adminUserId: UserID,
auditLogReason: string | null,
) {
return this.securityService.listWebAuthnCredentials(data, adminUserId, auditLogReason);
}
/**
 * Deletes one WebAuthn credential from a user's account.
 * Thin delegation to the security service, which validates the user and audits the deletion.
 */
async deleteWebAuthnCredential(
data: DeleteWebAuthnCredentialRequest,
adminUserId: UserID,
auditLogReason: string | null,
) {
return this.securityService.deleteWebAuthnCredential(data, adminUserId, auditLogReason);
}
async listUserDmChannels(data: ListUserDmChannelsRequest) {
const userId = createUserID(data.user_id);
const user = await this.userRepository.findUnique(userId);

View File

@@ -53,6 +53,8 @@ const adminEndpoints = [
{method: 'POST', path: '/admin/users/list-guilds', requiredACL: 'user:lookup'},
{method: 'POST', path: '/admin/users/list-dm-channels', requiredACL: 'user:list:dm_channels'},
{method: 'POST', path: '/admin/users/disable-mfa', requiredACL: 'user:update'},
{method: 'POST', path: '/admin/users/list-webauthn-credentials', requiredACL: 'user:update:mfa'},
{method: 'POST', path: '/admin/users/delete-webauthn-credential', requiredACL: 'user:update:mfa'},
{method: 'POST', path: '/admin/users/clear-fields', requiredACL: 'user:update'},
{method: 'POST', path: '/admin/users/set-bot-status', requiredACL: 'user:update'},
{method: 'POST', path: '/admin/users/set-acls', requiredACL: 'acl:set:user'},

View File

@@ -0,0 +1,565 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {createTestAccount, setUserACLs} from '@fluxer/api/src/auth/tests/AuthTestUtils';
import {createGuild} from '@fluxer/api/src/channel/tests/ChannelTestUtils';
import {type ApiTestHarness, createApiTestHarness} from '@fluxer/api/src/test/ApiTestHarness';
import {HTTP_STATUS} from '@fluxer/api/src/test/TestConstants';
import {createBuilder, createBuilderWithoutAuth} from '@fluxer/api/src/test/TestRequestBuilder';
import type {GuildAdminResponse} from '@fluxer/schema/src/domains/admin/AdminGuildSchemas';
import type {UserAdminResponse} from '@fluxer/schema/src/domains/admin/AdminUserSchemas';
import {afterEach, beforeEach, describe, expect, test} from 'vitest';
// Shape of the admin user-search response consumed by these tests:
// the matched page of users plus the total match count.
interface UserSearchResponse {
users: Array<UserAdminResponse>;
total: number;
}
// Shape of the admin guild-search response: the matched page of guilds plus
// the total match count. Presumably used by guild-search tests later in this
// file (not visible in this chunk).
interface GuildSearchResponse {
guilds: Array<GuildAdminResponse>;
total: number;
}
/**
 * Test helper: sets a user's phone and/or email through the unauthenticated
 * test-only endpoint, asserting the request succeeds with HTTP 200.
 *
 * @param harness - Running API test harness.
 * @param userId - ID of the user whose contact info is updated.
 * @param data - Phone and/or email values to set (null clears a field).
 */
async function setContactInfo(
  harness: ApiTestHarness,
  userId: string,
  data: {phone?: string | null; email?: string | null},
): Promise<void> {
  const request = createBuilderWithoutAuth(harness).post(`/test/users/${userId}/set-contact-info`);
  await request.body(data).expect(HTTP_STATUS.OK).execute();
}
describe('Admin Search Field Coverage', () => {
let harness: ApiTestHarness;
beforeEach(async () => {
harness = await createApiTestHarness({search: 'meilisearch'});
});
afterEach(async () => {
await harness.shutdown();
});
// Verifies that the admin user-search index matches on the email field:
// exact email, exclusion of non-matching emails, and partial-domain matching.
describe('user search by email', () => {
// Exact-email query must surface the user that owns that email.
test('searching by exact email returns only the matching user', async () => {
const admin = await createTestAccount(harness);
await setUserACLs(harness, admin, ['admin:authenticate', 'user:lookup']);
// Date.now() keeps the email unique across runs against a shared index.
const uniqueEmail = `precise-email-${Date.now()}@searchtest.example`;
const targetUser = await createTestAccount(harness, {email: uniqueEmail});
await createTestAccount(harness, {email: `other-user-${Date.now()}@different.example`});
const result = await createBuilder<UserSearchResponse>(harness, `Bearer ${admin.token}`)
.post('/admin/users/search')
.body({query: uniqueEmail, limit: 10, offset: 0})
.expect(HTTP_STATUS.OK)
.execute();
expect(result.total).toBeGreaterThanOrEqual(1);
const found = result.users.find((u) => u.id === targetUser.userId);
expect(found).toBeDefined();
expect(found!.email).toBe(uniqueEmail);
});
// Querying one user's email must not match a sibling with a different email
// on the same domain.
test('searching by email does not return users with different emails', async () => {
const admin = await createTestAccount(harness);
await setUserACLs(harness, admin, ['admin:authenticate', 'user:lookup']);
const emailA = `alpha-${Date.now()}@emailsearch.example`;
const emailB = `bravo-${Date.now()}@emailsearch.example`;
const userA = await createTestAccount(harness, {email: emailA});
const userB = await createTestAccount(harness, {email: emailB});
const result = await createBuilder<UserSearchResponse>(harness, `Bearer ${admin.token}`)
.post('/admin/users/search')
.body({query: emailA, limit: 10, offset: 0})
.expect(HTTP_STATUS.OK)
.execute();
const foundA = result.users.find((u) => u.id === userA.userId);
const foundB = result.users.find((u) => u.id === userB.userId);
expect(foundA).toBeDefined();
expect(foundB).toBeUndefined();
});
// The domain portion alone must be enough to find a user.
test('searching by partial email domain returns matching users', async () => {
const admin = await createTestAccount(harness);
await setUserACLs(harness, admin, ['admin:authenticate', 'user:lookup']);
const domain = `partialdomain${Date.now()}.example`;
const user = await createTestAccount(harness, {email: `user@${domain}`});
const result = await createBuilder<UserSearchResponse>(harness, `Bearer ${admin.token}`)
.post('/admin/users/search')
.body({query: domain, limit: 10, offset: 0})
.expect(HTTP_STATUS.OK)
.execute();
expect(result.total).toBeGreaterThanOrEqual(1);
const found = result.users.find((u) => u.id === user.userId);
expect(found).toBeDefined();
});
});
// Verifies that the admin user-search index matches on the username field.
describe('user search by username', () => {
// Exact-username query must surface the user that owns that username.
test('searching by exact username returns only the matching user', async () => {
const admin = await createTestAccount(harness);
await setUserACLs(harness, admin, ['admin:authenticate', 'user:lookup']);
// Timestamp suffix keeps the username unique across runs.
const uniqueUsername = `xuniq_${Date.now()}`;
const targetUser = await createTestAccount(harness, {username: uniqueUsername});
await createTestAccount(harness, {username: `yother_${Date.now()}`});
const result = await createBuilder<UserSearchResponse>(harness, `Bearer ${admin.token}`)
.post('/admin/users/search')
.body({query: uniqueUsername, limit: 10, offset: 0})
.expect(HTTP_STATUS.OK)
.execute();
expect(result.total).toBeGreaterThanOrEqual(1);
const found = result.users.find((u) => u.id === targetUser.userId);
expect(found).toBeDefined();
expect(found!.username).toBe(uniqueUsername);
});
// Two dissimilar usernames must not cross-match in search results.
test('searching by username does not return users with completely different usernames', async () => {
const admin = await createTestAccount(harness);
await setUserACLs(harness, admin, ['admin:authenticate', 'user:lookup']);
const ts = Date.now();
const usernameA = `zephyrfox_${ts}`;
const usernameB = `quasarmoon_${ts}`;
const userA = await createTestAccount(harness, {username: usernameA});
const userB = await createTestAccount(harness, {username: usernameB});
const result = await createBuilder<UserSearchResponse>(harness, `Bearer ${admin.token}`)
.post('/admin/users/search')
.body({query: usernameA, limit: 10, offset: 0})
.expect(HTTP_STATUS.OK)
.execute();
const foundA = result.users.find((u) => u.id === userA.userId);
const foundB = result.users.find((u) => u.id === userB.userId);
expect(foundA).toBeDefined();
expect(foundB).toBeUndefined();
});
});
// Verifies user-ID search, including the direct-DB-lookup fallback path and
// deduplication when both the search index and the fallback find the user.
describe('user search by user ID', () => {
// Querying by an exact snowflake ID must surface exactly that user.
test('searching by exact user ID returns only the matching user', async () => {
const admin = await createTestAccount(harness);
await setUserACLs(harness, admin, ['admin:authenticate', 'user:lookup']);
const targetUser = await createTestAccount(harness);
await createTestAccount(harness);
const result = await createBuilder<UserSearchResponse>(harness, `Bearer ${admin.token}`)
.post('/admin/users/search')
.body({query: targetUser.userId, limit: 10, offset: 0})
.expect(HTTP_STATUS.OK)
.execute();
expect(result.total).toBeGreaterThanOrEqual(1);
const found = result.users.find((u) => u.id === targetUser.userId);
expect(found).toBeDefined();
expect(found!.id).toBe(targetUser.userId);
});
// A freshly created user may not be indexed yet; the endpoint is expected to
// fall back to a direct DB lookup for ID queries.
test('user ID search returns the user even when meilisearch has no match via direct DB lookup', async () => {
const admin = await createTestAccount(harness);
await setUserACLs(harness, admin, ['admin:authenticate', 'user:lookup']);
const targetUser = await createTestAccount(harness);
const result = await createBuilder<UserSearchResponse>(harness, `Bearer ${admin.token}`)
.post('/admin/users/search')
.body({query: targetUser.userId, limit: 10, offset: 0})
.expect(HTTP_STATUS.OK)
.execute();
expect(result.total).toBeGreaterThanOrEqual(1);
const found = result.users.find((u) => u.id === targetUser.userId);
expect(found).toBeDefined();
});
// Index hit + DB-fallback hit must be merged into a single result entry.
test('user ID search does not duplicate user when found by both meilisearch and direct lookup', async () => {
const admin = await createTestAccount(harness);
await setUserACLs(harness, admin, ['admin:authenticate', 'user:lookup']);
const targetUser = await createTestAccount(harness);
const result = await createBuilder<UserSearchResponse>(harness, `Bearer ${admin.token}`)
.post('/admin/users/search')
.body({query: targetUser.userId, limit: 10, offset: 0})
.expect(HTTP_STATUS.OK)
.execute();
const matchingUsers = result.users.filter((u) => u.id === targetUser.userId);
expect(matchingUsers).toHaveLength(1);
});
});
// Verifies that the admin user-search index matches on the phone field.
describe('user search by phone number', () => {
// A phone query must match only the user that owns that number.
test('searching by phone number returns only the matching user', async () => {
const admin = await createTestAccount(harness);
await setUserACLs(harness, admin, ['admin:authenticate', 'user:lookup']);
// Last 7 digits of the timestamp keep the number unique across runs.
const phone = `+1555${Date.now().toString().slice(-7)}`;
const targetUser = await createTestAccount(harness);
await setContactInfo(harness, targetUser.userId, {phone});
const otherUser = await createTestAccount(harness);
await setContactInfo(harness, otherUser.userId, {phone: `+4420${Date.now().toString().slice(-7)}`});
const result = await createBuilder<UserSearchResponse>(harness, `Bearer ${admin.token}`)
.post('/admin/users/search')
.body({query: phone, limit: 10, offset: 0})
.expect(HTTP_STATUS.OK)
.execute();
expect(result.total).toBeGreaterThanOrEqual(1);
const found = result.users.find((u) => u.id === targetUser.userId);
expect(found).toBeDefined();
expect(found!.phone).toBe(phone);
const otherFound = result.users.find((u) => u.id === otherUser.userId);
expect(otherFound).toBeUndefined();
});
// Users with no phone on file must never match a phone query.
test('searching by phone does not return users without a phone', async () => {
const admin = await createTestAccount(harness);
await setUserACLs(harness, admin, ['admin:authenticate', 'user:lookup']);
const phone = `+1888${Date.now().toString().slice(-7)}`;
const userWithPhone = await createTestAccount(harness);
await setContactInfo(harness, userWithPhone.userId, {phone});
const userWithoutPhone = await createTestAccount(harness);
const result = await createBuilder<UserSearchResponse>(harness, `Bearer ${admin.token}`)
.post('/admin/users/search')
.body({query: phone, limit: 10, offset: 0})
.expect(HTTP_STATUS.OK)
.execute();
expect(result.total).toBeGreaterThanOrEqual(1);
const foundWithPhone = result.users.find((u) => u.id === userWithPhone.userId);
const foundWithoutPhone = result.users.find((u) => u.id === userWithoutPhone.userId);
expect(foundWithPhone).toBeDefined();
expect(foundWithoutPhone).toBeUndefined();
});
});
// Verifies the shape of the admin user-search payload: core identity fields
// must round-trip exactly, and every admin-only field must be present on
// each result.
describe('user search response fields', () => {
test('user search response includes all expected admin fields', async () => {
const admin = await createTestAccount(harness);
await setUserACLs(harness, admin, ['admin:authenticate', 'user:lookup']);
// Timestamped values keep this user unique and searchable per run.
const email = `fields-check-${Date.now()}@fieldtest.example`;
const username = `fieldcheck_${Date.now()}`;
const targetUser = await createTestAccount(harness, {email, username});
const phone = `+1999${Date.now().toString().slice(-7)}`;
await setContactInfo(harness, targetUser.userId, {phone});
const result = await createBuilder<UserSearchResponse>(harness, `Bearer ${admin.token}`)
.post('/admin/users/search')
.body({query: email, limit: 10, offset: 0})
.expect(HTTP_STATUS.OK)
.execute();
const found = result.users.find((u) => u.id === targetUser.userId);
expect(found).toBeDefined();
// Exact round-trip of the values set above.
expect(found!.id).toBe(targetUser.userId);
expect(found!.username).toBe(username);
expect(found!.email).toBe(email);
expect(found!.phone).toBe(phone);
// Admin-only fields: presence is asserted, values are not pinned here.
expect(found!).toHaveProperty('discriminator');
expect(found!).toHaveProperty('global_name');
expect(found!).toHaveProperty('flags');
expect(found!).toHaveProperty('email_verified');
expect(found!).toHaveProperty('email_bounced');
expect(found!).toHaveProperty('premium_type');
expect(found!).toHaveProperty('acls');
expect(found!).toHaveProperty('suspicious_activity_flags');
});
});
// Verifies that admin user search is field-isolated: a query matching one
// user's email, username, phone, or ID must not surface a different user
// whose own values are unique but unrelated to the query.
describe('user search isolation across fields', () => {
test('each searchable field returns only the correct user', async () => {
const admin = await createTestAccount(harness);
await setUserACLs(harness, admin, ['admin:authenticate', 'user:lookup']);
// One shared timestamp keeps every generated identifier unique per run.
const ts = Date.now();
const emailA = `vortexfind-${ts}@alphadomain.example`;
const emailB = `nebulaseek-${ts}@betadomain.example`;
const usernameA = `vortexfox_${ts}`;
const usernameB = `nebulawolf_${ts}`;
const phoneA = `+33612345${ts.toString().slice(-4)}`;
const phoneB = `+81907654${ts.toString().slice(-4)}`;
const userA = await createTestAccount(harness, {email: emailA, username: usernameA});
const userB = await createTestAccount(harness, {email: emailB, username: usernameB});
await setContactInfo(harness, userA.userId, {phone: phoneA});
await setContactInfo(harness, userB.userId, {phone: phoneB});
// Search by user A's email: only user A may appear.
const searchByEmailA = await createBuilder<UserSearchResponse>(harness, `Bearer ${admin.token}`)
.post('/admin/users/search')
.body({query: emailA, limit: 10, offset: 0})
.expect(HTTP_STATUS.OK)
.execute();
expect(searchByEmailA.users.find((u) => u.id === userA.userId)).toBeDefined();
expect(searchByEmailA.users.find((u) => u.id === userB.userId)).toBeUndefined();
// Search by user B's username: only user B may appear.
const searchByUsernameB = await createBuilder<UserSearchResponse>(harness, `Bearer ${admin.token}`)
.post('/admin/users/search')
.body({query: usernameB, limit: 10, offset: 0})
.expect(HTTP_STATUS.OK)
.execute();
expect(searchByUsernameB.users.find((u) => u.id === userB.userId)).toBeDefined();
expect(searchByUsernameB.users.find((u) => u.id === userA.userId)).toBeUndefined();
// Search by user A's phone: only user A may appear.
const searchByPhoneA = await createBuilder<UserSearchResponse>(harness, `Bearer ${admin.token}`)
.post('/admin/users/search')
.body({query: phoneA, limit: 10, offset: 0})
.expect(HTTP_STATUS.OK)
.execute();
expect(searchByPhoneA.users.find((u) => u.id === userA.userId)).toBeDefined();
expect(searchByPhoneA.users.find((u) => u.id === userB.userId)).toBeUndefined();
// Search by user B's ID: only user B may appear.
const searchByIdB = await createBuilder<UserSearchResponse>(harness, `Bearer ${admin.token}`)
.post('/admin/users/search')
.body({query: userB.userId, limit: 10, offset: 0})
.expect(HTTP_STATUS.OK)
.execute();
expect(searchByIdB.users.find((u) => u.id === userB.userId)).toBeDefined();
expect(searchByIdB.users.find((u) => u.id === userA.userId)).toBeUndefined();
});
});
describe('guild search by name', () => {
  test('searching by exact guild name returns only the matching guild', async () => {
    const adminAccount = await createTestAccount(harness);
    await setUserACLs(harness, adminAccount, ['admin:authenticate', 'guild:lookup']);
    const timestamp = Date.now();
    const targetName = `Zephyrion Stronghold ${timestamp}`;
    const decoyName = `Quasarwave Citadel ${timestamp}`;
    const targetGuild = await createGuild(harness, adminAccount.token, targetName);
    const decoyGuild = await createGuild(harness, adminAccount.token, decoyName);
    const response = await createBuilder<GuildSearchResponse>(harness, `Bearer ${adminAccount.token}`)
      .post('/admin/guilds/search')
      .body({query: targetName, limit: 10, offset: 0})
      .expect(HTTP_STATUS.OK)
      .execute();
    expect(response.total).toBeGreaterThanOrEqual(1);
    const matchedTarget = response.guilds.find((guild) => guild.id === targetGuild.id);
    const matchedDecoy = response.guilds.find((guild) => guild.id === decoyGuild.id);
    expect(matchedTarget).toBeDefined();
    expect(matchedTarget!.name).toBe(targetName);
    expect(matchedDecoy).toBeUndefined();
  });
  test('searching by guild name does not return guilds with different names', async () => {
    const adminAccount = await createTestAccount(harness);
    await setUserACLs(harness, adminAccount, ['admin:authenticate', 'guild:lookup']);
    const timestamp = Date.now();
    const firstName = `Xanthium ${timestamp}`;
    const secondName = `Ytterbium ${timestamp}`;
    const firstGuild = await createGuild(harness, adminAccount.token, firstName);
    const secondGuild = await createGuild(harness, adminAccount.token, secondName);
    const response = await createBuilder<GuildSearchResponse>(harness, `Bearer ${adminAccount.token}`)
      .post('/admin/guilds/search')
      .body({query: firstName, limit: 10, offset: 0})
      .expect(HTTP_STATUS.OK)
      .execute();
    expect(response.guilds.find((guild) => guild.id === firstGuild.id)).toBeDefined();
    expect(response.guilds.find((guild) => guild.id === secondGuild.id)).toBeUndefined();
  });
});
describe('guild search by ID', () => {
// An exact snowflake query must resolve the one matching guild and
// exclude all other guilds.
test('searching by exact guild ID returns only the matching guild', async () => {
const admin = await createTestAccount(harness);
await setUserACLs(harness, admin, ['admin:authenticate', 'guild:lookup']);
const guildA = await createGuild(harness, admin.token, `ID Search A ${Date.now()}`);
const guildB = await createGuild(harness, admin.token, `ID Search B ${Date.now()}`);
const result = await createBuilder<GuildSearchResponse>(harness, `Bearer ${admin.token}`)
.post('/admin/guilds/search')
.body({query: guildA.id, limit: 10, offset: 0})
.expect(HTTP_STATUS.OK)
.execute();
expect(result.total).toBeGreaterThanOrEqual(1);
const foundA = result.guilds.find((g) => g.id === guildA.id);
const foundB = result.guilds.find((g) => g.id === guildB.id);
expect(foundA).toBeDefined();
expect(foundA!.id).toBe(guildA.id);
expect(foundB).toBeUndefined();
});
// Per the test name, the direct-lookup fallback for ID queries is expected
// to apply only to the first page: at offset 1 the result set is empty.
test('guild ID search at non-zero offset does not perform direct DB lookup', async () => {
const admin = await createTestAccount(harness);
await setUserACLs(harness, admin, ['admin:authenticate', 'guild:lookup']);
const guild = await createGuild(harness, admin.token, `Offset Guild ${Date.now()}`);
const result = await createBuilder<GuildSearchResponse>(harness, `Bearer ${admin.token}`)
.post('/admin/guilds/search')
.body({query: guild.id, limit: 10, offset: 1})
.expect(HTTP_STATUS.OK)
.execute();
expect(result.guilds).toEqual([]);
expect(result.total).toBe(0);
});
});
// Verifies the shape of the admin guild-search payload and that ownership
// attribution is correct per guild.
describe('guild search response fields', () => {
test('guild search response includes all expected admin fields', async () => {
const admin = await createTestAccount(harness);
await setUserACLs(harness, admin, ['admin:authenticate', 'guild:lookup']);
const guildName = `Field Check Guild ${Date.now()}`;
const guild = await createGuild(harness, admin.token, guildName);
const result = await createBuilder<GuildSearchResponse>(harness, `Bearer ${admin.token}`)
.post('/admin/guilds/search')
.body({query: guildName, limit: 10, offset: 0})
.expect(HTTP_STATUS.OK)
.execute();
const found = result.guilds.find((g) => g.id === guild.id);
expect(found).toBeDefined();
// Exact round-trip of the identity fields.
expect(found!.id).toBe(guild.id);
expect(found!.name).toBe(guildName);
expect(found!.owner_id).toBe(admin.userId);
// Admin-only fields: presence is asserted, values are not pinned here.
expect(found!).toHaveProperty('features');
expect(found!).toHaveProperty('icon');
expect(found!).toHaveProperty('banner');
expect(found!).toHaveProperty('member_count');
// The creator is a member, so count is at least one.
expect(found!.member_count).toBeGreaterThanOrEqual(1);
});
test('guild search returns correct owner_id for each guild', async () => {
const admin = await createTestAccount(harness);
await setUserACLs(harness, admin, ['admin:authenticate', 'guild:lookup']);
const otherOwner = await createTestAccount(harness);
const ts = Date.now();
// Two guilds, each created by a different account.
const guildByAdmin = await createGuild(harness, admin.token, `Admin Owned ${ts}`);
const guildByOther = await createGuild(harness, otherOwner.token, `Other Owned ${ts}`);
const resultAdmin = await createBuilder<GuildSearchResponse>(harness, `Bearer ${admin.token}`)
.post('/admin/guilds/search')
.body({query: `Admin Owned ${ts}`, limit: 10, offset: 0})
.expect(HTTP_STATUS.OK)
.execute();
const foundAdmin = resultAdmin.guilds.find((g) => g.id === guildByAdmin.id);
expect(foundAdmin).toBeDefined();
expect(foundAdmin!.owner_id).toBe(admin.userId);
const resultOther = await createBuilder<GuildSearchResponse>(harness, `Bearer ${admin.token}`)
.post('/admin/guilds/search')
.body({query: `Other Owned ${ts}`, limit: 10, offset: 0})
.expect(HTTP_STATUS.OK)
.execute();
const foundOther = resultOther.guilds.find((g) => g.id === guildByOther.id);
expect(foundOther).toBeDefined();
expect(foundOther!.owner_id).toBe(otherOwner.userId);
});
});
describe('omitted and no-match queries', () => {
  // An absent query is treated as "list everything" for both endpoints.
  test('omitted query on user search returns users', async () => {
    const adminAccount = await createTestAccount(harness);
    await setUserACLs(harness, adminAccount, ['admin:authenticate', 'user:lookup']);
    await createTestAccount(harness);
    const response = await createBuilder<UserSearchResponse>(harness, `Bearer ${adminAccount.token}`)
      .post('/admin/users/search')
      .body({limit: 10, offset: 0})
      .expect(HTTP_STATUS.OK)
      .execute();
    expect(response.total).toBeGreaterThanOrEqual(1);
    expect(response.users.length).toBeGreaterThanOrEqual(1);
  });
  test('omitted query on guild search returns guilds', async () => {
    const adminAccount = await createTestAccount(harness);
    await setUserACLs(harness, adminAccount, ['admin:authenticate', 'guild:lookup']);
    await createGuild(harness, adminAccount.token, `Omitted Query Guild ${Date.now()}`);
    const response = await createBuilder<GuildSearchResponse>(harness, `Bearer ${adminAccount.token}`)
      .post('/admin/guilds/search')
      .body({limit: 10, offset: 0})
      .expect(HTTP_STATUS.OK)
      .execute();
    expect(response.total).toBeGreaterThanOrEqual(1);
    expect(response.guilds.length).toBeGreaterThanOrEqual(1);
  });
  // A query that cannot match anything yields an empty, zero-total page.
  test('completely unrelated query returns no users', async () => {
    const adminAccount = await createTestAccount(harness);
    await setUserACLs(harness, adminAccount, ['admin:authenticate', 'user:lookup']);
    const response = await createBuilder<UserSearchResponse>(harness, `Bearer ${adminAccount.token}`)
      .post('/admin/users/search')
      .body({query: 'zzz-impossible-match-query-xyzzy-99999', limit: 10, offset: 0})
      .expect(HTTP_STATUS.OK)
      .execute();
    expect(response.users).toEqual([]);
    expect(response.total).toBe(0);
  });
  test('completely unrelated query returns no guilds', async () => {
    const adminAccount = await createTestAccount(harness);
    await setUserACLs(harness, adminAccount, ['admin:authenticate', 'guild:lookup']);
    const response = await createBuilder<GuildSearchResponse>(harness, `Bearer ${adminAccount.token}`)
      .post('/admin/guilds/search')
      .body({query: 'zzz-impossible-match-query-xyzzy-99999', limit: 10, offset: 0})
      .expect(HTTP_STATUS.OK)
      .execute();
    expect(response.guilds).toEqual([]);
    expect(response.total).toBe(0);
  });
});
});

View File

@@ -26,13 +26,25 @@ import {initializeMetricsService} from '@fluxer/api/src/infrastructure/MetricsSe
import {InstanceConfigRepository} from '@fluxer/api/src/instance/InstanceConfigRepository';
import {ipBanCache} from '@fluxer/api/src/middleware/IpBanMiddleware';
import {initializeServiceSingletons} from '@fluxer/api/src/middleware/ServiceMiddleware';
import {ensureVoiceResourcesInitialized, getKVClient} from '@fluxer/api/src/middleware/ServiceRegistry';
import {
ensureVoiceResourcesInitialized,
getKVClient,
setInjectedWorkerService,
} from '@fluxer/api/src/middleware/ServiceRegistry';
import {ReportRepository} from '@fluxer/api/src/report/ReportRepository';
import {NatsApiRpcListener} from '@fluxer/api/src/rpc/NatsApiRpcListener';
import {initializeSearch, shutdownSearch} from '@fluxer/api/src/SearchFactory';
import {warmupAdminSearchIndexes} from '@fluxer/api/src/search/SearchWarmup';
import {VisionarySlotInitializer} from '@fluxer/api/src/stripe/VisionarySlotInitializer';
import {UserRepository} from '@fluxer/api/src/user/repositories/UserRepository';
import {VoiceDataInitializer} from '@fluxer/api/src/voice/VoiceDataInitializer';
import {JetStreamWorkerQueue} from '@fluxer/api/src/worker/JetStreamWorkerQueue';
import {WorkerService} from '@fluxer/api/src/worker/WorkerService';
import {JetStreamConnectionManager} from '@fluxer/nats/src/JetStreamConnectionManager';
import {NatsConnectionManager} from '@fluxer/nats/src/NatsConnectionManager';
let natsRpcListener: NatsApiRpcListener | null = null;
let jsConnectionManager: JetStreamConnectionManager | null = null;
export function createInitializer(config: APIConfig, logger: ILogger): () => Promise<void> {
return async (): Promise<void> => {
@@ -49,6 +61,19 @@ export function createInitializer(config: APIConfig, logger: ILogger): () => Pro
await initializeServiceSingletons();
logger.info('Service singletons initialized');
if (!config.dev.testModeEnabled) {
jsConnectionManager = new JetStreamConnectionManager({
url: config.nats.jetStreamUrl,
token: config.nats.authToken || undefined,
name: 'api-worker',
});
await jsConnectionManager.connect();
const workerQueue = new JetStreamWorkerQueue(jsConnectionManager);
await workerQueue.ensureInfrastructure();
setInjectedWorkerService(new WorkerService(workerQueue));
logger.info('JetStream worker service initialized');
}
try {
const userRepository = new UserRepository();
const kvDeletionQueue = new KVAccountDeletionQueueService(kvClient, userRepository);
@@ -122,6 +147,16 @@ export function createInitializer(config: APIConfig, logger: ILogger): () => Pro
}
}
if (!config.dev.testModeEnabled) {
const connectionManager = new NatsConnectionManager({
url: config.nats.coreUrl,
token: config.nats.authToken,
name: 'api-rpc-listener',
});
natsRpcListener = new NatsApiRpcListener(connectionManager, logger);
await natsRpcListener.start();
}
logger.info('API service initialization complete');
};
}
@@ -130,6 +165,26 @@ export function createShutdown(logger: ILogger): () => Promise<void> {
return async (): Promise<void> => {
logger.info('Shutting down API service...');
if (natsRpcListener) {
try {
await natsRpcListener.stop();
natsRpcListener = null;
} catch (error) {
logger.error({error}, 'Error shutting down NATS API RPC listener');
}
}
if (jsConnectionManager) {
try {
await jsConnectionManager.drain();
jsConnectionManager = null;
} catch (error) {
logger.error({error}, 'Error draining JetStream worker connection');
}
}
setInjectedWorkerService(undefined);
try {
await shutdownSearch();
logger.info('Search service shut down');

View File

@@ -36,7 +36,6 @@ import {OAuth2Controller} from '@fluxer/api/src/oauth/OAuth2Controller';
import {registerPackControllers} from '@fluxer/api/src/pack/controllers';
import {ReadStateController} from '@fluxer/api/src/read_state/ReadStateController';
import {ReportController} from '@fluxer/api/src/report/ReportController';
import {RpcController} from '@fluxer/api/src/rpc/RpcController';
import {SearchController} from '@fluxer/api/src/search/controllers/SearchController';
import {StripeController} from '@fluxer/api/src/stripe/StripeController';
import {TenorController} from '@fluxer/api/src/tenor/TenorController';
@@ -60,7 +59,6 @@ export function registerControllers(routes: HonoApp, config: APIConfig): void {
registerPackControllers(routes);
ReadStateController(routes);
ReportController(routes);
RpcController(routes);
GuildController(routes);
SearchController(routes);
KlipyController(routes);

View File

@@ -35,7 +35,7 @@ import type {AuthSession} from '@fluxer/api/src/models/AuthSession';
import type {User} from '@fluxer/api/src/models/User';
import {withBusinessSpan} from '@fluxer/api/src/telemetry/BusinessSpans';
import type {IUserRepository} from '@fluxer/api/src/user/IUserRepository';
import {formatGeoipLocation, lookupGeoip, UNKNOWN_LOCATION} from '@fluxer/api/src/utils/IpUtils';
import {lookupGeoip} from '@fluxer/api/src/utils/IpUtils';
import * as RandomUtils from '@fluxer/api/src/utils/RandomUtils';
import type {ICacheService} from '@fluxer/cache/src/ICacheService';
import {UserAuthenticatorTypes, UserFlags} from '@fluxer/constants/src/UserConstants';
@@ -47,6 +47,7 @@ import {IpAuthorizationResendLimitExceededError} from '@fluxer/errors/src/domain
import {InputValidationError} from '@fluxer/errors/src/domains/core/InputValidationError';
import {RateLimitError} from '@fluxer/errors/src/domains/core/RateLimitError';
import {UnknownUserError} from '@fluxer/errors/src/domains/user/UnknownUserError';
import {formatGeoipLocation, UNKNOWN_LOCATION} from '@fluxer/geoip/src/GeoipLookup';
import {requireClientIp} from '@fluxer/ip_utils/src/ClientIp';
import type {IRateLimitService, RateLimitResult} from '@fluxer/rate_limit/src/IRateLimitService';
import type {LoginRequest} from '@fluxer/schema/src/domains/auth/AuthSchemas';

View File

@@ -38,13 +38,7 @@ import {withBusinessSpan} from '@fluxer/api/src/telemetry/BusinessSpans';
import type {IUserRepository} from '@fluxer/api/src/user/IUserRepository';
import * as AgeUtils from '@fluxer/api/src/utils/AgeUtils';
import * as FetchUtils from '@fluxer/api/src/utils/FetchUtils';
import {
formatGeoipLocation,
type GeoipResult,
getIpAddressReverse,
lookupGeoip,
UNKNOWN_LOCATION,
} from '@fluxer/api/src/utils/IpUtils';
import {getIpAddressReverse, lookupGeoip} from '@fluxer/api/src/utils/IpUtils';
import {generateRandomUsername} from '@fluxer/api/src/utils/UsernameGenerator';
import {deriveUsernameFromDisplayName} from '@fluxer/api/src/utils/UsernameSuggestionUtils';
import type {ICacheService} from '@fluxer/cache/src/ICacheService';
@@ -53,6 +47,7 @@ import {UserFlags} from '@fluxer/constants/src/UserConstants';
import type {IEmailService} from '@fluxer/email/src/IEmailService';
import {InputValidationError} from '@fluxer/errors/src/domains/core/InputValidationError';
import {RateLimitError} from '@fluxer/errors/src/domains/core/RateLimitError';
import {formatGeoipLocation, type GeoipResult, UNKNOWN_LOCATION} from '@fluxer/geoip/src/GeoipLookup';
import {requireClientIp} from '@fluxer/ip_utils/src/ClientIp';
import {parseAcceptLanguage} from '@fluxer/locale/src/LocaleService';
import type {IRateLimitService, RateLimitResult} from '@fluxer/rate_limit/src/IRateLimitService';

View File

@@ -17,8 +17,9 @@
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import type {BlueskyCallbackResult} from '@fluxer/api/src/bluesky/IBlueskyOAuthService';
import type {BlueskyAuthorizeResult, BlueskyCallbackResult} from '@fluxer/api/src/bluesky/IBlueskyOAuthService';
import {Config} from '@fluxer/api/src/Config';
import {BlueskyOAuthAuthorizationFailedError} from '@fluxer/api/src/connection/errors/BlueskyOAuthAuthorizationFailedError';
import {BlueskyOAuthCallbackFailedError} from '@fluxer/api/src/connection/errors/BlueskyOAuthCallbackFailedError';
import {BlueskyOAuthNotEnabledError} from '@fluxer/api/src/connection/errors/BlueskyOAuthNotEnabledError';
import {BlueskyOAuthStateInvalidError} from '@fluxer/api/src/connection/errors/BlueskyOAuthStateInvalidError';
@@ -36,6 +37,15 @@ import {
BlueskyAuthorizeResponse,
} from '@fluxer/schema/src/domains/connection/BlueskyOAuthSchemas';
// Matches the scheme+host prefix of a bsky.app profile URL (case-insensitive).
const BLUESKY_PROFILE_URL_RE = /^https?:\/\/bsky\.app\/profile\//i;
/**
 * Normalizes user-supplied Bluesky identity input into a bare handle.
 *
 * Accepts a plain handle, an `@`-prefixed handle, or a pasted bsky.app
 * profile URL. Whitespace is trimmed, the profile-URL prefix is stripped,
 * and a leading `@` is removed.
 *
 * @param input - Raw value from the authorize request body.
 * @returns The bare handle (e.g. `alice.bsky.social`).
 */
function normalizeBlueskyHandle(input: string): string {
  let handle = input.trim();
  if (BLUESKY_PROFILE_URL_RE.test(handle)) {
    handle = handle.replace(BLUESKY_PROFILE_URL_RE, '');
    // A pasted profile URL may carry a trailing slash or extra path
    // segments (e.g. ".../profile/alice.bsky.social/post/123");
    // keep only the handle segment.
    const slashIndex = handle.indexOf('/');
    if (slashIndex !== -1) {
      handle = handle.slice(0, slashIndex);
    }
  }
  handle = handle.replace(/^@/, '');
  return handle;
}
export function BlueskyOAuthController(app: HonoApp) {
app.get('/connections/bluesky/client-metadata.json', async (ctx) => {
const service = ctx.get('blueskyOAuthService');
@@ -73,8 +83,9 @@ export function BlueskyOAuthController(app: HonoApp) {
if (!service) {
throw new BlueskyOAuthNotEnabledError();
}
const {handle} = ctx.req.valid('json');
const {handle: rawHandle} = ctx.req.valid('json');
const userId = ctx.get('user').id;
const handle = normalizeBlueskyHandle(rawHandle);
const connectionService = ctx.get('connectionService');
const connections = await connectionService.getConnectionsForUser(userId);
@@ -86,7 +97,13 @@ export function BlueskyOAuthController(app: HonoApp) {
throw new ConnectionAlreadyExistsError();
}
const result = await service.authorize(handle, userId);
let result: BlueskyAuthorizeResult;
try {
result = await service.authorize(handle, userId);
} catch (error) {
Logger.error({error, handle}, 'Bluesky OAuth authorize failed');
throw new BlueskyOAuthAuthorizationFailedError();
}
return ctx.json({authorize_url: result.authorizeUrl});
},
);

View File

@@ -65,7 +65,7 @@ export class BlueskyOAuthService implements IBlueskyOAuthService {
clientMetadata: {
client_id: `${baseUrl}/connections/bluesky/client-metadata.json`,
client_name: config.client_name,
client_uri: config.client_uri || baseUrl,
client_uri: baseUrl,
logo_uri: config.logo_uri || undefined,
tos_uri: config.tos_uri || undefined,
policy_uri: config.policy_uri || undefined,

View File

@@ -20,6 +20,7 @@
import {createTestAccount} from '@fluxer/api/src/auth/tests/AuthTestUtils';
import {createUserID} from '@fluxer/api/src/BrandedTypes';
import {
createBlueskyConnectionViaOAuth,
createBlueskyDid,
createBlueskyHandle,
listConnections,
@@ -122,6 +123,69 @@ describe('Bluesky OAuth', () => {
const callArgs = harness.mockBlueskyOAuthService.authorizeSpy.mock.calls[0];
expect(callArgs[0]).toBe(handle);
});
// A pasted https profile URL is reduced to a bare handle before it reaches
// the OAuth service's authorize() call.
it('normalises a bsky.app profile URL to a handle', async () => {
const account = await createTestAccount(harness);
await createBuilder(harness, account.token)
.post('/users/@me/connections/bluesky/authorize')
.body({handle: 'https://bsky.app/profile/alice.bsky.social'})
.expect(200)
.execute();
expect(harness.mockBlueskyOAuthService.authorizeSpy).toHaveBeenCalledTimes(1);
expect(harness.mockBlueskyOAuthService.authorizeSpy.mock.calls[0][0]).toBe('alice.bsky.social');
});
// Same normalisation applies to plain-http profile URLs.
it('normalises an http bsky.app profile URL to a handle', async () => {
const account = await createTestAccount(harness);
await createBuilder(harness, account.token)
.post('/users/@me/connections/bluesky/authorize')
.body({handle: 'http://bsky.app/profile/someone.bsky.social'})
.expect(200)
.execute();
expect(harness.mockBlueskyOAuthService.authorizeSpy.mock.calls[0][0]).toBe('someone.bsky.social');
});
// A leading @ sigil is removed before authorize() is invoked.
it('strips leading @ from handle', async () => {
const account = await createTestAccount(harness);
await createBuilder(harness, account.token)
.post('/users/@me/connections/bluesky/authorize')
.body({handle: '@alice.bsky.social'})
.expect(200)
.execute();
expect(harness.mockBlueskyOAuthService.authorizeSpy.mock.calls[0][0]).toBe('alice.bsky.social');
});
// Duplicate detection must compare the normalised handle, so submitting the
// profile URL of an already-connected handle conflicts.
it('detects duplicate after normalising profile URL', async () => {
const account = await createTestAccount(harness);
const handle = createBlueskyHandle('testuser');
const did = createBlueskyDid('testuser');
const userId = createUserID(BigInt(account.userId));
await createBlueskyConnectionViaOAuth(harness, account.token, handle, did, userId);
await createBuilder(harness, account.token)
.post('/users/@me/connections/bluesky/authorize')
.body({handle: `https://bsky.app/profile/${handle}`})
.expect(HTTP_STATUS.CONFLICT, APIErrorCodes.CONNECTION_ALREADY_EXISTS)
.execute();
});
// A failure inside the OAuth service's authorize() is surfaced to the client
// as the dedicated BLUESKY_OAUTH_AUTHORIZATION_FAILED error.
it('returns BLUESKY_OAUTH_AUTHORIZATION_FAILED when authorize throws', async () => {
const account = await createTestAccount(harness);
harness.mockBlueskyOAuthService.configure({shouldFailAuthorize: true});
await createBuilder(harness, account.token)
.post('/users/@me/connections/bluesky/authorize')
.body({handle: 'invalid-handle'})
.expect(HTTP_STATUS.BAD_REQUEST, APIErrorCodes.BLUESKY_OAUTH_AUTHORIZATION_FAILED)
.execute();
});
});
describe('GET /connections/bluesky/callback', () => {

View File

@@ -0,0 +1,119 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {createChannelID} from '@fluxer/api/src/BrandedTypes';
import {DefaultUserOnly, LoginRequired} from '@fluxer/api/src/middleware/AuthMiddleware';
import {RateLimitMiddleware} from '@fluxer/api/src/middleware/RateLimitMiddleware';
import {OpenAPI} from '@fluxer/api/src/middleware/ResponseTypeMiddleware';
import {RateLimitConfigs} from '@fluxer/api/src/RateLimitConfig';
import type {HonoApp} from '@fluxer/api/src/types/HonoEnv';
import {Validator} from '@fluxer/api/src/Validator';
import {
ChunkedUploadChunkParam,
ChunkedUploadParam,
CompleteChunkedUploadRequest,
CompleteChunkedUploadResponse,
CreateChunkedUploadRequest,
CreateChunkedUploadResponse,
UploadChunkResponse,
} from '@fluxer/schema/src/domains/channel/ChunkedUploadSchemas';
import {ChannelIdParam} from '@fluxer/schema/src/domains/common/CommonParamSchemas';
/**
 * Registers the chunked-upload routes for channels: create a session,
 * upload individual chunks, and complete the upload. All three routes
 * require login, are restricted to default users, carry per-route rate
 * limits, and delegate the actual work to the chunkedUploadService on
 * the request context.
 */
export function ChunkedUploadController(app: HonoApp) {
// Step 1: create an upload session; responds 201 with upload ID,
// chunk size, and chunk count.
app.post(
'/channels/:channel_id/chunked-uploads',
RateLimitMiddleware(RateLimitConfigs.CHANNEL_CHUNKED_UPLOAD_CREATE),
LoginRequired,
DefaultUserOnly,
Validator('param', ChannelIdParam),
Validator('json', CreateChunkedUploadRequest),
OpenAPI({
operationId: 'create_chunked_upload',
summary: 'Initiate a chunked upload session',
description:
'Creates a new chunked upload session for uploading large files. Returns the upload ID, expected chunk size, and total chunk count. The client should then upload each chunk individually and complete the upload when all chunks are uploaded.',
responseSchema: CreateChunkedUploadResponse,
statusCode: 201,
security: ['bearerToken', 'sessionToken'],
tags: ['Channels', 'Attachments'],
}),
async (ctx) => {
const user = ctx.get('user');
const channelId = createChannelID(ctx.req.valid('param').channel_id);
const body = ctx.req.valid('json');
const chunkedUploadService = ctx.get('chunkedUploadService');
const result = await chunkedUploadService.initiateUpload(user.id, channelId, body);
return ctx.json(result, 201);
},
);
// Step 2: upload one zero-indexed chunk; the raw request body is the
// chunk bytes, and the response carries an ETag needed at completion.
app.put(
'/channels/:channel_id/chunked-uploads/:upload_id/chunks/:chunk_index',
RateLimitMiddleware(RateLimitConfigs.CHANNEL_CHUNKED_UPLOAD_CHUNK),
LoginRequired,
DefaultUserOnly,
Validator('param', ChunkedUploadChunkParam),
OpenAPI({
operationId: 'upload_chunk',
summary: 'Upload a file chunk',
description:
'Uploads a single chunk of a file as part of a chunked upload session. The chunk index is zero-based. Returns an ETag that must be provided when completing the upload.',
responseSchema: UploadChunkResponse,
statusCode: 200,
security: ['bearerToken', 'sessionToken'],
tags: ['Channels', 'Attachments'],
}),
async (ctx) => {
const user = ctx.get('user');
const {upload_id, chunk_index} = ctx.req.valid('param');
// Raw binary body; no JSON validator on this route.
const arrayBuffer = await ctx.req.arrayBuffer();
const body = new Uint8Array(arrayBuffer);
const chunkedUploadService = ctx.get('chunkedUploadService');
const result = await chunkedUploadService.uploadChunk(user.id, upload_id, chunk_index, body);
return ctx.json(result);
},
);
// Step 3: assemble the chunks; the request supplies all chunk ETags and
// the response carries the final upload filename.
app.post(
'/channels/:channel_id/chunked-uploads/:upload_id/complete',
RateLimitMiddleware(RateLimitConfigs.CHANNEL_CHUNKED_UPLOAD_COMPLETE),
LoginRequired,
DefaultUserOnly,
Validator('param', ChunkedUploadParam),
Validator('json', CompleteChunkedUploadRequest),
OpenAPI({
operationId: 'complete_chunked_upload',
summary: 'Complete a chunked upload',
description:
'Completes a chunked upload session by assembling all uploaded chunks. Requires ETags for all chunks. Returns the upload filename that can be referenced when sending a message with the uploaded file.',
responseSchema: CompleteChunkedUploadResponse,
statusCode: 200,
security: ['bearerToken', 'sessionToken'],
tags: ['Channels', 'Attachments'],
}),
async (ctx) => {
const user = ctx.get('user');
const {upload_id} = ctx.req.valid('param');
const body = ctx.req.valid('json');
const chunkedUploadService = ctx.get('chunkedUploadService');
const result = await chunkedUploadService.completeUpload(user.id, upload_id, body);
return ctx.json(result);
},
);
}

View File

@@ -19,6 +19,7 @@
import {CallController} from '@fluxer/api/src/channel/controllers/CallController';
import {ChannelController} from '@fluxer/api/src/channel/controllers/ChannelController';
import {ChunkedUploadController} from '@fluxer/api/src/channel/controllers/ChunkedUploadController';
import {MessageController} from '@fluxer/api/src/channel/controllers/MessageController';
import {MessageInteractionController} from '@fluxer/api/src/channel/controllers/MessageInteractionController';
import {ScheduledMessageController} from '@fluxer/api/src/channel/controllers/ScheduledMessageController';
@@ -30,6 +31,7 @@ export function registerChannelControllers(app: HonoApp) {
MessageInteractionController(app);
MessageController(app);
ScheduledMessageController(app);
ChunkedUploadController(app);
CallController(app);
StreamController(app);
}

View File

@@ -65,15 +65,13 @@ export class ChannelDataRepository extends IChannelDataRepository {
const channelId = data.channel_id;
const result = await executeVersionedUpdate<ChannelRow, 'channel_id' | 'soft_deleted'>(
async () => {
if (oldData !== undefined) return oldData;
return await fetchOne<ChannelRow>(FETCH_CHANNEL_BY_ID.bind({channel_id: channelId, soft_deleted: false}));
},
async () => fetchOne<ChannelRow>(FETCH_CHANNEL_BY_ID.bind({channel_id: channelId, soft_deleted: false})),
(current) => ({
pk: {channel_id: channelId, soft_deleted: false},
patch: buildPatchFromData(data, current, CHANNEL_COLUMNS, ['channel_id', 'soft_deleted']),
}),
Channels,
{initialData: oldData},
);
if (data.guild_id) {

View File

@@ -727,7 +727,6 @@ export class MessageDataRepository {
const result = await executeVersionedUpdate<MessageRow, 'channel_id' | 'bucket' | 'message_id'>(
async () => {
if (oldData !== undefined) return oldData;
const pk = {
channel_id: data.channel_id,
bucket: data.bucket,
@@ -745,6 +744,7 @@ export class MessageDataRepository {
patch: buildPatchFromData(data, current, MESSAGE_COLUMNS, ['channel_id', 'bucket', 'message_id']),
}),
Messages,
{initialData: oldData},
);
if (!result.applied) {

View File

@@ -0,0 +1,227 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import type {ChannelID, UserID} from '@fluxer/api/src/BrandedTypes';
import {Config} from '@fluxer/api/src/Config';
import type {AuthenticatedChannel} from '@fluxer/api/src/channel/services/AuthenticatedChannel';
import {getContentType} from '@fluxer/api/src/channel/services/message/MessageHelpers';
import type {IStorageService} from '@fluxer/api/src/infrastructure/IStorageService';
import type {LimitConfigService} from '@fluxer/api/src/limits/LimitConfigService';
import {resolveLimitSafe} from '@fluxer/api/src/limits/LimitConfigUtils';
import {createLimitMatchContext} from '@fluxer/api/src/limits/LimitMatchContextBuilder';
import type {Channel} from '@fluxer/api/src/models/Channel';
import type {IUserRepository} from '@fluxer/api/src/user/IUserRepository';
import {Permissions} from '@fluxer/constants/src/ChannelConstants';
import {
ATTACHMENT_MAX_SIZE_NON_PREMIUM,
CHUNKED_UPLOAD_CHUNK_SIZE,
CHUNKED_UPLOAD_MAX_CHUNKS,
CHUNKED_UPLOAD_SESSION_TTL_SECONDS,
} from '@fluxer/constants/src/LimitConstants';
import {ChunkedUploadChunkIndexOutOfRangeError} from '@fluxer/errors/src/domains/channel/ChunkedUploadChunkIndexOutOfRangeError';
import {ChunkedUploadIncompleteError} from '@fluxer/errors/src/domains/channel/ChunkedUploadIncompleteError';
import {ChunkedUploadNotFoundError} from '@fluxer/errors/src/domains/channel/ChunkedUploadNotFoundError';
import {ChunkedUploadNotOwnedError} from '@fluxer/errors/src/domains/channel/ChunkedUploadNotOwnedError';
import {FileSizeTooLargeError} from '@fluxer/errors/src/domains/core/FileSizeTooLargeError';
import {UnknownUserError} from '@fluxer/errors/src/domains/user/UnknownUserError';
import type {IKVProvider} from '@fluxer/kv_client/src/IKVProvider';
import type {
CompleteChunkedUploadRequest,
CompleteChunkedUploadResponse,
CreateChunkedUploadRequest,
CreateChunkedUploadResponse,
UploadChunkResponse,
} from '@fluxer/schema/src/domains/channel/ChunkedUploadSchemas';
/**
 * KV-persisted state for one in-progress chunked upload. Serialized to JSON
 * and stored under `chunked_upload:<uploadId>` with a TTL
 * (see CHUNKED_UPLOAD_SESSION_TTL_SECONDS).
 */
interface ChunkedUploadSession {
  // Owner's user ID (stringified); compared against the requester on every chunk/complete call.
  userId: string;
  // Channel the upload was initiated for (stringified ID).
  channelId: string;
  // Underlying S3 multipart upload ID returned by createMultipartUpload.
  s3UploadId: string;
  // Random object key in the uploads bucket (decoupled from the client filename).
  uploadFilename: string;
  // Original client-supplied filename.
  filename: string;
  // Declared total file size in bytes.
  fileSize: number;
  // Fixed chunk size (CHUNKED_UPLOAD_CHUNK_SIZE) used to derive chunkCount.
  chunkSize: number;
  // Number of chunks the client is expected to upload.
  chunkCount: number;
  // Content type derived from the filename at initiation time.
  contentType: string;
}
/** Builds the KV key under which a chunked-upload session record is stored. */
function sessionKey(uploadId: string): string {
  return 'chunked_upload:' + uploadId;
}
/**
 * Coordinates chunked uploads of large attachments: each client-visible upload
 * session is backed by an S3 multipart upload plus a KV record that tracks
 * ownership and chunk layout.
 *
 * Lifecycle: initiateUpload -> uploadChunk (once per chunk) -> completeUpload.
 * Sessions expire automatically after CHUNKED_UPLOAD_SESSION_TTL_SECONDS.
 */
export class ChunkedUploadService {
  constructor(
    private storageService: IStorageService,
    private kvProvider: IKVProvider,
    private userRepository: IUserRepository,
    private limitConfigService: LimitConfigService,
    // Resolves the channel together with the caller's permission context for it.
    private getChannelAuthenticated: (params: {userId: UserID; channelId: ChannelID}) => Promise<AuthenticatedChannel>,
    // Throws when the target channel is not a text channel.
    private ensureTextChannel: (channel: Channel) => void,
  ) {}

  /**
   * Validates permissions and size limits, starts an S3 multipart upload, and
   * persists a session record in KV.
   *
   * @throws UnknownUserError when the authenticated user row cannot be loaded.
   * @throws FileSizeTooLargeError when the declared size exceeds the resolved
   *   per-user limit, or would require more than CHUNKED_UPLOAD_MAX_CHUNKS chunks.
   */
  async initiateUpload(
    userId: UserID,
    channelId: ChannelID,
    request: CreateChunkedUploadRequest,
  ): Promise<CreateChunkedUploadResponse> {
    const {channel, guild, checkPermission} = await this.getChannelAuthenticated({userId, channelId});
    this.ensureTextChannel(channel);
    // Permission bits are only checked inside guilds (no guild => DM-style channel).
    if (guild) {
      await checkPermission(Permissions.SEND_MESSAGES | Permissions.ATTACH_FILES);
    }
    const user = await this.userRepository.findUnique(userId);
    if (!user) {
      throw new UnknownUserError();
    }
    const fallbackMaxSize = ATTACHMENT_MAX_SIZE_NON_PREMIUM;
    const ctx = createLimitMatchContext({user, guildFeatures: guild?.features ?? null});
    // Max attachment size can vary per user/guild (e.g. premium tiers); fall
    // back to the non-premium cap when no specific limit matches.
    const maxFileSize = resolveLimitSafe(
      this.limitConfigService.getConfigSnapshot(),
      ctx,
      'max_attachment_file_size',
      fallbackMaxSize,
    );
    if (request.file_size > maxFileSize) {
      throw new FileSizeTooLargeError(maxFileSize);
    }
    const chunkCount = Math.ceil(request.file_size / CHUNKED_UPLOAD_CHUNK_SIZE);
    // Guard against sizes that pass the byte limit but exceed the chunk cap.
    if (chunkCount > CHUNKED_UPLOAD_MAX_CHUNKS) {
      throw new FileSizeTooLargeError(maxFileSize);
    }
    // Random storage key decouples the object name from the user-supplied filename.
    const uploadFilename = crypto.randomUUID();
    const contentType = getContentType(request.filename);
    const {uploadId: s3UploadId} = await this.storageService.createMultipartUpload({
      bucket: Config.s3.buckets.uploads,
      key: uploadFilename,
      contentType,
    });
    const uploadId = crypto.randomUUID();
    const session: ChunkedUploadSession = {
      userId: userId.toString(),
      channelId: channelId.toString(),
      s3UploadId,
      uploadFilename,
      filename: request.filename,
      fileSize: request.file_size,
      chunkSize: CHUNKED_UPLOAD_CHUNK_SIZE,
      chunkCount,
      contentType,
    };
    // TTL bounds abandoned sessions. NOTE(review): the underlying S3 multipart
    // upload is not aborted when the KV record expires — presumably cleaned up
    // by a separate lifecycle policy; confirm.
    await this.kvProvider.setex(sessionKey(uploadId), CHUNKED_UPLOAD_SESSION_TTL_SECONDS, JSON.stringify(session));
    return {
      upload_id: uploadId,
      upload_filename: uploadFilename,
      chunk_size: CHUNKED_UPLOAD_CHUNK_SIZE,
      chunk_count: chunkCount,
    };
  }

  /**
   * Uploads one chunk as an S3 part.
   *
   * Chunk indices are 0-based on the wire, while S3 part numbers are 1-based —
   * hence the `chunkIndex + 1` below.
   *
   * @throws ChunkedUploadNotFoundError when the session is missing or expired.
   * @throws ChunkedUploadNotOwnedError when the caller did not create the session.
   * @throws ChunkedUploadChunkIndexOutOfRangeError when the index is outside [0, chunkCount).
   */
  async uploadChunk(
    userId: UserID,
    uploadId: string,
    chunkIndex: number,
    body: Uint8Array,
  ): Promise<UploadChunkResponse> {
    const session = await this.getSession(uploadId);
    this.verifyOwnership(session, userId);
    if (chunkIndex < 0 || chunkIndex >= session.chunkCount) {
      throw new ChunkedUploadChunkIndexOutOfRangeError();
    }
    const {etag} = await this.storageService.uploadPart({
      bucket: Config.s3.buckets.uploads,
      key: session.uploadFilename,
      uploadId: session.s3UploadId,
      partNumber: chunkIndex + 1,
      body,
    });
    return {etag};
  }

  /**
   * Finalizes the multipart upload from the client-collected etags and deletes
   * the session record.
   *
   * Requires exactly one etag per chunk: the count must equal chunkCount, every
   * index must be in range, and no index may repeat.
   *
   * @throws ChunkedUploadNotFoundError / ChunkedUploadNotOwnedError as in uploadChunk.
   * @throws ChunkedUploadIncompleteError on a wrong etag count or duplicate index.
   * @throws ChunkedUploadChunkIndexOutOfRangeError on an out-of-range index.
   */
  async completeUpload(
    userId: UserID,
    uploadId: string,
    request: CompleteChunkedUploadRequest,
  ): Promise<CompleteChunkedUploadResponse> {
    const session = await this.getSession(uploadId);
    this.verifyOwnership(session, userId);
    if (request.etags.length !== session.chunkCount) {
      throw new ChunkedUploadIncompleteError();
    }
    const seenIndices = new Set<number>();
    for (const entry of request.etags) {
      if (entry.chunk_index < 0 || entry.chunk_index >= session.chunkCount) {
        throw new ChunkedUploadChunkIndexOutOfRangeError();
      }
      if (seenIndices.has(entry.chunk_index)) {
        throw new ChunkedUploadIncompleteError();
      }
      seenIndices.add(entry.chunk_index);
    }
    // Translate 0-based chunk indices to 1-based S3 part numbers.
    const parts = request.etags.map((entry) => ({
      partNumber: entry.chunk_index + 1,
      etag: entry.etag,
    }));
    await this.storageService.completeMultipartUpload({
      bucket: Config.s3.buckets.uploads,
      key: session.uploadFilename,
      uploadId: session.s3UploadId,
      parts,
    });
    // Session is single-use; drop it once the object is assembled.
    await this.kvProvider.del(sessionKey(uploadId));
    return {
      upload_filename: session.uploadFilename,
      file_size: session.fileSize,
      content_type: session.contentType,
    };
  }

  /** Loads and deserializes the session record, or throws if missing/expired. */
  private async getSession(uploadId: string): Promise<ChunkedUploadSession> {
    const raw = await this.kvProvider.get(sessionKey(uploadId));
    if (!raw) {
      throw new ChunkedUploadNotFoundError();
    }
    return JSON.parse(raw) as ChunkedUploadSession;
  }

  /** Rejects any caller other than the user who initiated the session. */
  private verifyOwnership(session: ChunkedUploadSession, userId: UserID): void {
    if (session.userId !== userId.toString()) {
      throw new ChunkedUploadNotOwnedError();
    }
  }
}

View File

@@ -17,9 +17,11 @@
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {S3ServiceException} from '@aws-sdk/client-s3';
import type {ChannelID, UserID} from '@fluxer/api/src/BrandedTypes';
import {Config} from '@fluxer/api/src/Config';
import type {IStorageService} from '@fluxer/api/src/infrastructure/IStorageService';
import {Logger} from '@fluxer/api/src/Logger';
import type {ICacheService} from '@fluxer/cache/src/ICacheService';
import {STREAM_PREVIEW_CONTENT_TYPE_JPEG, STREAM_PREVIEW_MAX_BYTES} from '@fluxer/constants/src/StreamConstants';
import {FileSizeTooLargeError} from '@fluxer/errors/src/domains/core/FileSizeTooLargeError';
@@ -85,13 +87,21 @@ export class StreamPreviewService {
const key = this.getObjectKey(params.streamKey);
const expiresAt = new Date(Date.now() + ms('1 day'));
await this.storageService.uploadObject({
bucket,
key,
body: params.body,
contentType: params.contentType ?? STREAM_PREVIEW_CONTENT_TYPE_JPEG,
expiresAt,
});
try {
await this.storageService.uploadObject({
bucket,
key,
body: params.body,
contentType: params.contentType ?? STREAM_PREVIEW_CONTENT_TYPE_JPEG,
expiresAt,
});
} catch (error) {
if (error instanceof S3ServiceException && error.name === 'OperationAborted') {
Logger.warn({streamKey: params.streamKey}, 'Stream preview upload aborted due to S3 conflict, skipping');
return;
}
throw error;
}
const meta: StreamPreviewMeta = {
bucket,

View File

@@ -17,6 +17,7 @@
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import fs from 'node:fs/promises';
import {createAttachmentID} from '@fluxer/api/src/BrandedTypes';
import {Config} from '@fluxer/api/src/Config';
import type {AttachmentToProcess} from '@fluxer/api/src/channel/AttachmentDTOs';
@@ -45,6 +46,7 @@ import type {GuildMemberResponse} from '@fluxer/schema/src/domains/guild/GuildMe
import type {GuildResponse} from '@fluxer/schema/src/domains/guild/GuildResponseSchemas';
import {recordCounter} from '@fluxer/telemetry/src/Metrics';
import type {IVirusScanService} from '@fluxer/virus_scan/src/IVirusScanService';
import {temporaryFile} from 'tempy';
interface ProcessAttachmentParams {
message: Message;
@@ -397,16 +399,14 @@ export class AttachmentProcessingService {
}
private async scanMalware(attachment: AttachmentToProcess): Promise<{isVirusDetected: boolean}> {
const fileData = await this.storageService.readObject(Config.s3.buckets.uploads, attachment.upload_filename);
if (!fileData) {
throw InputValidationError.fromCode('attachment', ValidationErrorCodes.FILE_NOT_FOUND_FOR_SCANNING);
const tempPath = temporaryFile();
try {
await this.storageService.writeObjectToDisk(Config.s3.buckets.uploads, attachment.upload_filename, tempPath);
const scanResult = await this.virusScanService.scanFile(tempPath);
return {isVirusDetected: !scanResult.isClean};
} finally {
await fs.unlink(tempPath).catch(() => {});
}
const fileBuffer = Buffer.from(fileData);
const scanResult = await this.virusScanService.scanBuffer(fileBuffer, attachment.filename);
return {isVirusDetected: !scanResult.isClean};
}
private deleteUploadObject(bucket: string, key: string): void {

View File

@@ -17,6 +17,7 @@
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {S3ServiceException} from '@aws-sdk/client-s3';
import type {ChannelID, UserID} from '@fluxer/api/src/BrandedTypes';
import {createAttachmentID, createChannelID, createMemeID, createMessageID} from '@fluxer/api/src/BrandedTypes';
import {Config} from '@fluxer/api/src/Config';
@@ -94,13 +95,20 @@ export class MessageOperationsHelpers {
const sourceKey = favoriteMeme.storageKey;
const destKey = makeAttachmentCdnKey(channelId, memeAttachmentId, favoriteMeme.filename);
await this.deps.storageService.copyObject({
sourceBucket: Config.s3.buckets.cdn,
sourceKey,
destinationBucket: Config.s3.buckets.cdn,
destinationKey: destKey,
newContentType: favoriteMeme.contentType,
});
try {
await this.deps.storageService.copyObject({
sourceBucket: Config.s3.buckets.cdn,
sourceKey,
destinationBucket: Config.s3.buckets.cdn,
destinationKey: destKey,
newContentType: favoriteMeme.contentType,
});
} catch (error) {
if (error instanceof S3ServiceException && (error.name === 'NoSuchKey' || error.name === 'NotFound')) {
throw InputValidationError.fromCode('favorite_meme_id', ValidationErrorCodes.FAVORITE_MEME_NOT_FOUND);
}
throw error;
}
let flags = 0;
if (favoriteMeme.isGifv) {

View File

@@ -18,6 +18,7 @@
*/
import type {ChannelID} from '@fluxer/api/src/BrandedTypes';
import {Config} from '@fluxer/api/src/Config';
import {
type AttachmentRequestData,
mergeUploadWithClientData,
@@ -25,6 +26,7 @@ import {
} from '@fluxer/api/src/channel/AttachmentDTOs';
import type {IChannelRepository} from '@fluxer/api/src/channel/IChannelRepository';
import type {MessageRequest, MessageUpdateRequest} from '@fluxer/api/src/channel/MessageTypes';
import {getContentType} from '@fluxer/api/src/channel/services/message/MessageHelpers';
import type {GuildService} from '@fluxer/api/src/guild/services/GuildService';
import type {LimitConfigService} from '@fluxer/api/src/limits/LimitConfigService';
import {resolveLimitSafe} from '@fluxer/api/src/limits/LimitConfigUtils';
@@ -152,7 +154,11 @@ export async function parseMultipartMessageData(
ValidationErrorCodes.ATTACHMENTS_METADATA_REQUIRED_WHEN_UPLOADING,
);
}
}
const hasAttachmentMetadata = data.attachments && Array.isArray(data.attachments) && data.attachments.length > 0;
if (hasAttachmentMetadata) {
type AttachmentMetadata = ClientAttachmentRequest | ClientAttachmentReferenceRequest;
const attachmentMetadata = data.attachments as Array<AttachmentMetadata>;
@@ -163,66 +169,95 @@ export async function parseMultipartMessageData(
(a): a is ClientAttachmentReferenceRequest => !('filename' in a) || a.filename === undefined,
);
const metadataIds = new Set(
newAttachments.map((a) => {
const id = a.id;
return typeof id === 'string' ? parseInt(id, 10) : id;
}),
);
const fileIds = new Set(filesWithIndices.map((f) => f.index));
for (const fileId of fileIds) {
if (!metadataIds.has(fileId)) {
throw InputValidationError.fromCode('attachments', ValidationErrorCodes.NO_METADATA_FOR_FILE, {fileId});
}
}
const inlineNewAttachments: Array<ClientAttachmentRequest> = [];
const preUploadedNewAttachments: Array<ClientAttachmentRequest> = [];
for (const att of newAttachments) {
const id = typeof att.id === 'string' ? parseInt(att.id, 10) : att.id;
if (!fileIds.has(id)) {
if (fileIds.has(id)) {
inlineNewAttachments.push(att);
} else if (att.uploaded_filename) {
preUploadedNewAttachments.push(att);
} else {
throw InputValidationError.fromCode('attachments', ValidationErrorCodes.NO_FILE_FOR_ATTACHMENT_METADATA, {
attachmentId: att.id,
});
}
}
const uploadedAttachments: Array<UploadedAttachment> = await ctx.get('channelService').uploadFormDataAttachments({
userId: user.id,
channelId,
files: filesWithIndices,
attachmentMetadata: newAttachments,
expiresAt: options?.uploadExpiresAt,
});
const inlineMetadataIds = new Set(
inlineNewAttachments.map((a) => {
const id = a.id;
return typeof id === 'string' ? parseInt(id, 10) : id;
}),
);
const uploadedMap = new Map(uploadedAttachments.map((u) => [u.id, u]));
const processedNewAttachments = newAttachments.map((clientData) => {
const id = typeof clientData.id === 'string' ? parseInt(clientData.id, 10) : clientData.id;
const uploaded = uploadedMap.get(id);
if (!uploaded) {
throw InputValidationError.fromCode('attachments', ValidationErrorCodes.NO_FILE_FOR_ATTACHMENT, {
attachmentId: clientData.id,
});
for (const fileId of fileIds) {
if (!inlineMetadataIds.has(fileId)) {
throw InputValidationError.fromCode('attachments', ValidationErrorCodes.NO_METADATA_FOR_FILE, {fileId});
}
}
if (clientData.filename !== uploaded.filename) {
throw InputValidationError.fromCode('attachments', ValidationErrorCodes.FILENAME_MISMATCH_FOR_ATTACHMENT, {
attachmentId: clientData.id,
expectedFilename: clientData.filename,
});
}
let processedInlineAttachments: Array<AttachmentRequestData> = [];
if (inlineNewAttachments.length > 0) {
const uploadedAttachments: Array<UploadedAttachment> = await ctx.get('channelService').uploadFormDataAttachments({
userId: user.id,
channelId,
files: filesWithIndices,
attachmentMetadata: inlineNewAttachments,
expiresAt: options?.uploadExpiresAt,
});
return mergeUploadWithClientData(uploaded, clientData);
});
const uploadedMap = new Map(uploadedAttachments.map((u) => [u.id, u]));
data.attachments = [...existingAttachments, ...processedNewAttachments];
} else if (
data.attachments?.some((a: unknown) => {
const attachment = a as ClientAttachmentRequest | ClientAttachmentReferenceRequest;
return 'filename' in attachment && attachment.filename;
})
) {
throw InputValidationError.fromCode('attachments', ValidationErrorCodes.ATTACHMENT_METADATA_WITHOUT_FILES);
processedInlineAttachments = inlineNewAttachments.map((clientData) => {
const id = typeof clientData.id === 'string' ? parseInt(clientData.id, 10) : clientData.id;
const uploaded = uploadedMap.get(id);
if (!uploaded) {
throw InputValidationError.fromCode('attachments', ValidationErrorCodes.NO_FILE_FOR_ATTACHMENT, {
attachmentId: clientData.id,
});
}
if (clientData.filename !== uploaded.filename) {
throw InputValidationError.fromCode('attachments', ValidationErrorCodes.FILENAME_MISMATCH_FOR_ATTACHMENT, {
attachmentId: clientData.id,
expectedFilename: clientData.filename,
});
}
return mergeUploadWithClientData(uploaded, clientData);
});
}
let processedPreUploadedAttachments: Array<AttachmentRequestData> = [];
if (preUploadedNewAttachments.length > 0) {
const storageService = ctx.get('storageService');
processedPreUploadedAttachments = await Promise.all(
preUploadedNewAttachments.map(async (clientData) => {
const uploadFilename = clientData.uploaded_filename!;
const metadata = await storageService.getObjectMetadata(Config.s3.buckets.uploads, uploadFilename);
if (!metadata) {
throw InputValidationError.fromCode('attachments', ValidationErrorCodes.NO_FILE_FOR_ATTACHMENT_METADATA, {
attachmentId: clientData.id,
});
}
const uploaded: UploadedAttachment = {
id: typeof clientData.id === 'string' ? parseInt(clientData.id, 10) : clientData.id,
upload_filename: uploadFilename,
filename: clientData.filename,
file_size: metadata.contentLength,
content_type: getContentType(clientData.filename),
};
return mergeUploadWithClientData(uploaded, clientData);
}),
);
}
data.attachments = [...existingAttachments, ...processedInlineAttachments, ...processedPreUploadedAttachments];
}
return data as MessageRequest | MessageUpdateRequest;

View File

@@ -366,6 +366,7 @@ export class MessageSendService {
userId: user.id,
channelId: createChannelID(data.message_reference.channel_id!),
});
await this.ensureForwardSourceAccess(forwardReferenceAuthChannel);
referenceChannelId = forwardReferenceAuthChannel.channel.id;
referencedChannelGuildId = forwardReferenceAuthChannel.channel.guildId ?? null;
}
@@ -469,6 +470,7 @@ export class MessageSendService {
userId: user.id,
channelId: createChannelID(data.message_reference!.channel_id!),
});
await this.ensureForwardSourceAccess(forwardReferenceAuthChannel);
referenceChannelId = forwardReferenceAuthChannel.channel.id;
referencedChannelGuildId = forwardReferenceAuthChannel.channel.guildId ?? null;
}
@@ -506,6 +508,13 @@ export class MessageSendService {
return {referencedMessage, referencedChannelGuildId, messageSnapshots};
}
private async ensureForwardSourceAccess(authChannel: AuthenticatedChannel): Promise<void> {
if (authChannel.guild) {
await authChannel.checkPermission(Permissions.VIEW_CHANNEL);
await authChannel.checkPermission(Permissions.READ_MESSAGE_HISTORY);
}
}
private async ensureReferencedMessageIsVisible({
referencedMessage,
authChannel,

View File

@@ -20,7 +20,6 @@
import {readFileSync} from 'node:fs';
import {join} from 'node:path';
import {createTestAccount, type TestAccount} from '@fluxer/api/src/auth/tests/AuthTestUtils';
import {Config} from '@fluxer/api/src/Config';
import {ensureSessionStarted} from '@fluxer/api/src/message/tests/MessageTestUtils';
import type {ApiTestHarness} from '@fluxer/api/src/test/ApiTestHarness';
import {createBuilder} from '@fluxer/api/src/test/TestRequestBuilder';
@@ -152,14 +151,5 @@ export async function setupTestGuildAndChannel(
export async function createTestAccountForAttachmentTests(harness: ApiTestHarness): Promise<TestAccount> {
const account = await createTestAccount(harness);
await ensureSessionStarted(harness, account.token);
await createBuilder<{type: 'session'}>(harness, `Bearer ${Config.gateway.rpcSecret}`)
.post('/_rpc')
.body({
type: 'session',
token: account.token,
version: 1,
ip: '127.0.0.1',
})
.execute();
return account;
}

View File

@@ -0,0 +1,396 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import type {TestAccount} from '@fluxer/api/src/auth/tests/AuthTestUtils';
import {createMultipartFormData, setupTestGuildAndChannel} from '@fluxer/api/src/channel/tests/AttachmentTestUtils';
import type {ApiTestHarness} from '@fluxer/api/src/test/ApiTestHarness';
import {createApiTestHarness} from '@fluxer/api/src/test/ApiTestHarness';
import {createBuilder} from '@fluxer/api/src/test/TestRequestBuilder';
import {APIErrorCodes} from '@fluxer/constants/src/ApiErrorCodes';
import {CHUNKED_UPLOAD_CHUNK_SIZE} from '@fluxer/constants/src/LimitConstants';
import type {ChannelResponse} from '@fluxer/schema/src/domains/channel/ChannelSchemas';
import type {
CompleteChunkedUploadResponse,
CreateChunkedUploadResponse,
UploadChunkResponse,
} from '@fluxer/schema/src/domains/channel/ChunkedUploadSchemas';
import type {MessageResponse} from '@fluxer/schema/src/domains/message/MessageResponseSchemas';
import {afterAll, beforeAll, beforeEach, describe, expect, it} from 'vitest';
// Shared API test harness for this suite: created once, reset between tests,
// shut down when the suite finishes.
let harness: ApiTestHarness;
beforeAll(async () => {
  harness = await createApiTestHarness();
});
beforeEach(async () => {
  await harness.reset();
});
afterAll(async () => {
  // Optional chaining guards against beforeAll having failed before assignment.
  await harness?.shutdown();
});
/**
 * Starts a chunked-upload session via the HTTP API, asserting a 201 response,
 * and returns the session descriptor.
 */
async function initiateChunkedUpload(
  token: string,
  channelId: string,
  filename: string,
  fileSize: number,
): Promise<CreateChunkedUploadResponse> {
  const builder = createBuilder<CreateChunkedUploadResponse>(harness, token);
  const result = await builder
    .post(`/channels/${channelId}/chunked-uploads`)
    .body({filename, file_size: fileSize})
    .expect(201)
    .execute();
  return result;
}
/**
 * PUTs one raw chunk to the chunked-upload endpoint, asserts a 200 status, and
 * returns the parsed etag payload.
 */
async function uploadChunk(
  token: string,
  channelId: string,
  uploadId: string,
  chunkIndex: number,
  data: Buffer,
): Promise<UploadChunkResponse> {
  const headers = new Headers();
  headers.set('Authorization', token);
  headers.set('Content-Type', 'application/octet-stream');
  headers.set('x-forwarded-for', '127.0.0.1');
  const url = `/channels/${channelId}/chunked-uploads/${uploadId}/chunks/${chunkIndex}`;
  const response = await harness.app.request(url, {
    method: 'PUT',
    headers,
    body: data,
  });
  expect(response.status).toBe(200);
  return (await response.json()) as UploadChunkResponse;
}
/** Finalizes a chunked upload by submitting the collected part etags. */
async function completeChunkedUpload(
  token: string,
  channelId: string,
  uploadId: string,
  etags: Array<{chunk_index: number; etag: string}>,
): Promise<CompleteChunkedUploadResponse> {
  const path = `/channels/${channelId}/chunked-uploads/${uploadId}/complete`;
  const builder = createBuilder<CompleteChunkedUploadResponse>(harness, token);
  return builder.post(path).body({etags}).execute();
}
describe('Chunked Uploads', () => {
  let account: TestAccount;
  let channel: ChannelResponse;
  // Fresh guild + text channel per test so upload sessions never leak across tests.
  beforeEach(async () => {
    const setup = await setupTestGuildAndChannel(harness);
    account = setup.account;
    channel = setup.channel;
  });
  describe('POST /channels/:channel_id/chunked-uploads', () => {
    it('should initiate a chunked upload session', async () => {
      // 2 full chunks + 100 bytes => expects ceil() to produce 3 chunks.
      const fileSize = CHUNKED_UPLOAD_CHUNK_SIZE * 2 + 100;
      const result = await initiateChunkedUpload(account.token, channel.id, 'large-file.bin', fileSize);
      expect(result.upload_id).toBeDefined();
      expect(result.upload_filename).toBeDefined();
      expect(result.chunk_size).toBe(CHUNKED_UPLOAD_CHUNK_SIZE);
      expect(result.chunk_count).toBe(3);
    });
    it('should reject when file size exceeds the limit', async () => {
      // 10 GiB — assumed to be above any configured attachment limit.
      const hugeSize = 1024 * 1024 * 1024 * 10;
      await createBuilder(harness, account.token)
        .post(`/channels/${channel.id}/chunked-uploads`)
        .body({filename: 'huge.bin', file_size: hugeSize})
        .expect(400, APIErrorCodes.FILE_SIZE_TOO_LARGE)
        .execute();
    });
    it('should reject without authentication', async () => {
      await createBuilder(harness, '')
        .post(`/channels/${channel.id}/chunked-uploads`)
        .body({filename: 'file.bin', file_size: 1024})
        .expect(401)
        .execute();
    });
  });
  describe('PUT /channels/:channel_id/chunked-uploads/:upload_id/chunks/:chunk_index', () => {
    it('should upload a chunk and return an etag', async () => {
      const fileSize = CHUNKED_UPLOAD_CHUNK_SIZE + 100;
      const initResult = await initiateChunkedUpload(account.token, channel.id, 'test.bin', fileSize);
      const chunkData = Buffer.alloc(CHUNKED_UPLOAD_CHUNK_SIZE, 0xab);
      const result = await uploadChunk(account.token, channel.id, initResult.upload_id, 0, chunkData);
      expect(result.etag).toBeDefined();
      expect(typeof result.etag).toBe('string');
    });
    it('should reject chunk index out of range', async () => {
      const fileSize = CHUNKED_UPLOAD_CHUNK_SIZE + 100;
      const initResult = await initiateChunkedUpload(account.token, channel.id, 'test.bin', fileSize);
      const chunkData = Buffer.alloc(100, 0xab);
      // Raw request instead of the uploadChunk helper: the helper asserts 200,
      // but this call is expected to fail with 400.
      const response = await harness.app.request(
        `/channels/${channel.id}/chunked-uploads/${initResult.upload_id}/chunks/99`,
        {
          method: 'PUT',
          headers: new Headers({
            Authorization: account.token,
            'Content-Type': 'application/octet-stream',
            'x-forwarded-for': '127.0.0.1',
          }),
          body: chunkData,
        },
      );
      expect(response.status).toBe(400);
      const body = (await response.json()) as {code: string};
      expect(body.code).toBe(APIErrorCodes.CHUNKED_UPLOAD_CHUNK_INDEX_OUT_OF_RANGE);
    });
    it('should reject for non-existent upload session', async () => {
      const chunkData = Buffer.alloc(100, 0xab);
      const response = await harness.app.request(`/channels/${channel.id}/chunked-uploads/non-existent-id/chunks/0`, {
        method: 'PUT',
        headers: new Headers({
          Authorization: account.token,
          'Content-Type': 'application/octet-stream',
          'x-forwarded-for': '127.0.0.1',
        }),
        body: chunkData,
      });
      expect(response.status).toBe(404);
      const body = (await response.json()) as {code: string};
      expect(body.code).toBe(APIErrorCodes.CHUNKED_UPLOAD_NOT_FOUND);
    });
  });
  describe('POST /channels/:channel_id/chunked-uploads/:upload_id/complete', () => {
    it('should complete a chunked upload', async () => {
      const chunkSize = CHUNKED_UPLOAD_CHUNK_SIZE;
      const fileSize = chunkSize * 2;
      const initResult = await initiateChunkedUpload(account.token, channel.id, 'two-chunks.bin', fileSize);
      const chunk0 = Buffer.alloc(chunkSize, 0xaa);
      const chunk1 = Buffer.alloc(chunkSize, 0xbb);
      const etag0 = await uploadChunk(account.token, channel.id, initResult.upload_id, 0, chunk0);
      const etag1 = await uploadChunk(account.token, channel.id, initResult.upload_id, 1, chunk1);
      const result = await completeChunkedUpload(account.token, channel.id, initResult.upload_id, [
        {chunk_index: 0, etag: etag0.etag},
        {chunk_index: 1, etag: etag1.etag},
      ]);
      expect(result.upload_filename).toBe(initResult.upload_filename);
      expect(result.file_size).toBe(fileSize);
      expect(result.content_type).toBeDefined();
    });
    it('should reject when not all chunks have been provided', async () => {
      const fileSize = CHUNKED_UPLOAD_CHUNK_SIZE * 2;
      const initResult = await initiateChunkedUpload(account.token, channel.id, 'test.bin', fileSize);
      const chunk0 = Buffer.alloc(CHUNKED_UPLOAD_CHUNK_SIZE, 0xaa);
      const etag0 = await uploadChunk(account.token, channel.id, initResult.upload_id, 0, chunk0);
      // Only 1 of 2 etags submitted => incomplete.
      await createBuilder(harness, account.token)
        .post(`/channels/${channel.id}/chunked-uploads/${initResult.upload_id}/complete`)
        .body({etags: [{chunk_index: 0, etag: etag0.etag}]})
        .expect(400, APIErrorCodes.CHUNKED_UPLOAD_INCOMPLETE)
        .execute();
    });
    it('should reject duplicate chunk indices', async () => {
      const fileSize = CHUNKED_UPLOAD_CHUNK_SIZE * 2;
      const initResult = await initiateChunkedUpload(account.token, channel.id, 'test.bin', fileSize);
      const chunk0 = Buffer.alloc(CHUNKED_UPLOAD_CHUNK_SIZE, 0xaa);
      const etag0 = await uploadChunk(account.token, channel.id, initResult.upload_id, 0, chunk0);
      // Correct count (2) but index 0 repeated => still incomplete.
      await createBuilder(harness, account.token)
        .post(`/channels/${channel.id}/chunked-uploads/${initResult.upload_id}/complete`)
        .body({
          etags: [
            {chunk_index: 0, etag: etag0.etag},
            {chunk_index: 0, etag: etag0.etag},
          ],
        })
        .expect(400, APIErrorCodes.CHUNKED_UPLOAD_INCOMPLETE)
        .execute();
    });
  });
  describe('Upload ownership', () => {
    it('should reject chunk upload from a different user', async () => {
      const fileSize = CHUNKED_UPLOAD_CHUNK_SIZE + 100;
      const initResult = await initiateChunkedUpload(account.token, channel.id, 'test.bin', fileSize);
      // Second, unrelated account attempts to write into the first user's session.
      const otherSetup = await setupTestGuildAndChannel(harness);
      const otherAccount = otherSetup.account;
      const chunkData = Buffer.alloc(100, 0xab);
      const response = await harness.app.request(
        `/channels/${channel.id}/chunked-uploads/${initResult.upload_id}/chunks/0`,
        {
          method: 'PUT',
          headers: new Headers({
            Authorization: otherAccount.token,
            'Content-Type': 'application/octet-stream',
            'x-forwarded-for': '127.0.0.1',
          }),
          body: chunkData,
        },
      );
      expect(response.status).toBe(403);
      const body = (await response.json()) as {code: string};
      expect(body.code).toBe(APIErrorCodes.CHUNKED_UPLOAD_NOT_OWNED);
    });
  });
  describe('End-to-end: chunked upload + message send', () => {
    it('should send a message with a pre-uploaded file', async () => {
      const chunkSize = CHUNKED_UPLOAD_CHUNK_SIZE;
      const fileSize = chunkSize + 500;
      const initResult = await initiateChunkedUpload(account.token, channel.id, 'uploaded-file.txt', fileSize);
      const chunk0 = Buffer.alloc(chunkSize, 0x41);
      const chunk1 = Buffer.alloc(500, 0x42);
      const etag0 = await uploadChunk(account.token, channel.id, initResult.upload_id, 0, chunk0);
      const etag1 = await uploadChunk(account.token, channel.id, initResult.upload_id, 1, chunk1);
      await completeChunkedUpload(account.token, channel.id, initResult.upload_id, [
        {chunk_index: 0, etag: etag0.etag},
        {chunk_index: 1, etag: etag1.etag},
      ]);
      // Reference the completed upload by uploaded_filename; no inline file parts.
      const payload = {
        content: 'Message with chunked upload',
        attachments: [
          {
            id: 0,
            filename: 'uploaded-file.txt',
            uploaded_filename: initResult.upload_filename,
          },
        ],
      };
      const {body, contentType} = createMultipartFormData(payload, []);
      const mergedHeaders = new Headers();
      mergedHeaders.set('Content-Type', contentType);
      mergedHeaders.set('Authorization', account.token);
      mergedHeaders.set('x-forwarded-for', '127.0.0.1');
      const response = await harness.app.request(`/channels/${channel.id}/messages`, {
        method: 'POST',
        headers: mergedHeaders,
        body,
      });
      expect(response.status).toBe(200);
      const message = (await response.json()) as MessageResponse;
      expect(message.content).toBe('Message with chunked upload');
      expect(message.attachments).toBeDefined();
      expect(message.attachments!.length).toBe(1);
      expect(message.attachments![0].filename).toBe('uploaded-file.txt');
    });
    it('should send a message with both inline and pre-uploaded files', async () => {
      const chunkSize = CHUNKED_UPLOAD_CHUNK_SIZE;
      const fileSize = chunkSize + 100;
      const initResult = await initiateChunkedUpload(account.token, channel.id, 'large.bin', fileSize);
      const chunk0 = Buffer.alloc(chunkSize, 0xcc);
      const chunk1 = Buffer.alloc(100, 0xdd);
      const etag0 = await uploadChunk(account.token, channel.id, initResult.upload_id, 0, chunk0);
      const etag1 = await uploadChunk(account.token, channel.id, initResult.upload_id, 1, chunk1);
      await completeChunkedUpload(account.token, channel.id, initResult.upload_id, [
        {chunk_index: 0, etag: etag0.etag},
        {chunk_index: 1, etag: etag1.etag},
      ]);
      const smallFileData = Buffer.from('small inline file content');
      // Attachment 0 is carried inline in the multipart body; attachment 1
      // references the pre-uploaded object.
      const payload = {
        content: 'Mixed upload message',
        attachments: [
          {
            id: 0,
            filename: 'small.txt',
          },
          {
            id: 1,
            filename: 'large.bin',
            uploaded_filename: initResult.upload_filename,
          },
        ],
      };
      const {response, json} = await sendMixedMessage(account.token, channel.id, payload, [
        {index: 0, filename: 'small.txt', data: smallFileData},
      ]);
      expect(response.status).toBe(200);
      expect(json.content).toBe('Mixed upload message');
      expect(json.attachments).toBeDefined();
      expect(json.attachments!.length).toBe(2);
    });
  });
});
/**
 * POSTs a multipart message (payload_json plus optional inline files) to the
 * channel and returns the raw Response together with the parsed body. When the
 * response body is empty or not valid JSON, `json` is left unset so callers
 * can still inspect `response.status`.
 */
async function sendMixedMessage(
  token: string,
  channelId: string,
  payload: Record<string, unknown>,
  files: Array<{index: number; filename: string; data: Buffer}>,
): Promise<{response: Response; json: MessageResponse}> {
  const {body, contentType} = createMultipartFormData(payload, files);
  const headers = new Headers();
  headers.set('Content-Type', contentType);
  headers.set('Authorization', token);
  headers.set('x-forwarded-for', '127.0.0.1');
  const response = await harness.app.request(`/channels/${channelId}/messages`, {
    method: 'POST',
    headers,
    body,
  });
  const text = await response.text();
  let json: MessageResponse = undefined as unknown as MessageResponse;
  if (text.length > 0) {
    try {
      json = JSON.parse(text) as MessageResponse;
    } catch {
      // Non-JSON body (e.g. plain-text error page): keep json unset.
    }
  }
  return {response, json};
}

View File

@@ -38,10 +38,10 @@ export interface APIConfig {
url: string;
};
gateway: {
rpcEndpoint: string;
rpcSecret: string;
rpcTcpPort: number;
nats: {
coreUrl: string;
jetStreamUrl: string;
authToken: string;
};
mediaProxy: {

View File

@@ -0,0 +1,27 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {APIErrorCodes} from '@fluxer/constants/src/ApiErrorCodes';
import {BadRequestError} from '@fluxer/errors/src/domains/core/BadRequestError';
/**
 * Raised when the Bluesky OAuth flow fails to authorize the user.
 * Extends BadRequestError and carries the BLUESKY_OAUTH_AUTHORIZATION_FAILED API error code.
 */
export class BlueskyOAuthAuthorizationFailedError extends BadRequestError {
	constructor() {
		super({code: APIErrorCodes.BLUESKY_OAUTH_AUTHORIZATION_FAILED});
	}
}

View File

@@ -1830,12 +1830,12 @@ export async function executeVersionedUpdate<
fetchCurrent: () => Promise<Row | null>,
buildPatch: (current: Row | null) => {pk: Record<string, unknown>; patch: Patch},
table: Table<Row, PK>,
opts?: {maxRetries?: number},
opts?: {maxRetries?: number; initialData?: Row | null},
): Promise<{applied: boolean; finalVersion: number | null}> {
const maxRetries = opts?.maxRetries ?? DEFAULT_LWT_RETRIES;
for (let attempt = 0; attempt < maxRetries; attempt++) {
const current = await fetchCurrent();
const current = attempt === 0 && opts?.initialData !== undefined ? opts.initialData : await fetchCurrent();
const currentVersion = current?.version ?? null;
const newVersion = nextVersion(currentVersion);

View File

@@ -169,20 +169,21 @@ export function DownloadController(routes: Hono<HonoEnv>): void {
operationId: 'download_file',
summary: 'Download file',
responseSchema: null,
statusCode: 200,
statusCode: 302,
security: [],
tags: ['Downloads'],
description: 'Downloads files from the download service with support for range requests and caching.',
description: 'Redirects to a presigned URL for the requested file.',
}),
async (ctx) => {
const response = await ctx.get('downloadService').streamDownload({
const url = await ctx.get('downloadService').resolveDownloadRedirect({
path: ctx.req.path,
rangeHeader: ctx.req.header('range'),
});
if (!response) {
if (!url) {
return ctx.text('Not Found', 404);
}
return response;
const res = ctx.redirect(url, 302);
res.headers.set('Cache-Control', 'public, max-age=300');
return res;
},
);
}

View File

@@ -415,7 +415,7 @@ export class DownloadService {
return null;
}
async streamDownload(params: {path: string; rangeHeader?: string | null}): Promise<Response | null> {
async resolveDownloadRedirect(params: {path: string}): Promise<string | null> {
const key = this.buildKeyFromPath(params.path);
if (!key) {
return null;
@@ -429,20 +429,13 @@ export class DownloadService {
for (const candidateKey of keysToTry) {
try {
const streamResult = await this.storageService.streamObject({
bucket: Config.s3.buckets.downloads,
key: candidateKey,
range: params.rangeHeader ?? undefined,
});
if (!streamResult) {
continue;
const metadata = await this.storageService.getObjectMetadata(Config.s3.buckets.downloads, candidateKey);
if (metadata) {
return this.storageService.getPresignedDownloadURL({
bucket: Config.s3.buckets.downloads,
key: candidateKey,
});
}
const headers = this.buildDownloadHeaders(streamResult);
const status = streamResult.contentRange ? 206 : 200;
const body = Readable.toWeb(streamResult.body);
return new Response(body as ReadableStream, {headers, status});
} catch (error) {
if (error instanceof S3ServiceException && (error.name === 'NoSuchKey' || error.name === 'NotFound')) {
continue;
@@ -681,26 +674,4 @@ export class DownloadService {
const [, prefix, , , arch, suffix] = match;
return `${prefix}/${arch}${suffix}`;
}
private buildDownloadHeaders(metadata: {
contentLength: number;
contentType?: string | null;
contentRange?: string | null;
}): Headers {
const headers = new Headers();
if (metadata.contentType) {
headers.set('Content-Type', metadata.contentType);
} else {
headers.set('Content-Type', 'application/octet-stream');
}
headers.set('Content-Length', metadata.contentLength.toString());
if (metadata.contentRange) {
headers.set('Content-Range', metadata.contentRange);
headers.set('Accept-Ranges', 'bytes');
} else {
headers.set('Accept-Ranges', 'bytes');
}
headers.set('Cache-Control', 'public, max-age=86400');
return headers;
}
}

View File

@@ -44,6 +44,7 @@ import {InputValidationError} from '@fluxer/errors/src/domains/core/InputValidat
import {MaxFavoriteMemesError} from '@fluxer/errors/src/domains/core/MaxFavoriteMemesError';
import {MediaMetadataError} from '@fluxer/errors/src/domains/core/MediaMetadataError';
import {UnknownFavoriteMemeError} from '@fluxer/errors/src/domains/core/UnknownFavoriteMemeError';
import {normalizeFilename} from '@fluxer/schema/src/primitives/FileValidators';
import mime from 'mime';
export class FavoriteMemeService {
@@ -431,8 +432,10 @@ export class FavoriteMemeService {
const extension = mime.getExtension(contentType) || 'bin';
try {
const urlPath = new URL(url).pathname;
const urlFilename = urlPath.split('/').pop() || 'media';
return urlFilename.includes('.') ? urlFilename : `${urlFilename}.${extension}`;
const rawSegment = urlPath.split('/').pop() || 'media';
const decoded = decodeURIComponent(rawSegment);
const base = decoded.includes('.') ? decoded : `${decoded}.${extension}`;
return normalizeFilename(base) || `media.${extension}`;
} catch {
return `media.${extension}`;
}
@@ -675,8 +678,12 @@ export class FavoriteMemeService {
private extractFilenameFromUrl(url: string): string | null {
try {
const urlObj = new URL(url);
const parts = urlObj.pathname.split('/');
return parts[parts.length - 1] || null;
const rawSegment = urlObj.pathname.split('/').pop();
if (!rawSegment) {
return null;
}
const decoded = decodeURIComponent(rawSegment);
return normalizeFilename(decoded) || null;
} catch {
return null;
}

View File

@@ -183,8 +183,23 @@ describe('Favorite Meme Extended Tests', () => {
});
describe('Personal Notes (DM to self with meme)', () => {
test('should send favorite meme as attachment in personal notes', async () => {
/**
 * Creates a fresh test account and initializes a gateway session for it via
 * the /test/rpc-session-init hook, so personal-notes (self-DM) flows can run.
 */
async function createAccountWithPersonalNotes(harness: ApiTestHarness) {
	const account = await createTestAccountForAttachmentTests(harness);
	const sessionInit = {
		type: 'session',
		token: account.token,
		version: 1,
		ip: '127.0.0.1',
	};
	await createBuilderWithoutAuth(harness)
		.post('/test/rpc-session-init')
		.body(sessionInit)
		.expect(HTTP_STATUS.OK)
		.execute();
	return account;
}
test('should send favorite meme as attachment in personal notes', async () => {
const account = await createAccountWithPersonalNotes(harness);
const meme = await createFavoriteMemeFromUrl(harness, account.token, {
url: TEST_IMAGE_URL,
@@ -207,7 +222,7 @@ describe('Favorite Meme Extended Tests', () => {
});
test('should fetch personal note message with meme attachment', async () => {
const account = await createTestAccountForAttachmentTests(harness);
const account = await createAccountWithPersonalNotes(harness);
const meme = await createFavoriteMemeFromUrl(harness, account.token, {
url: TEST_IMAGE_URL,

View File

@@ -17,11 +17,11 @@
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {createChannelID, createGuildID, createUserID} from '@fluxer/api/src/BrandedTypes';
import {createChannelID, createGuildID, createUserID, type GuildID} from '@fluxer/api/src/BrandedTypes';
import {GatewayRpcClient} from '@fluxer/api/src/infrastructure/GatewayRpcClient';
import {GatewayRpcMethodErrorCodes} from '@fluxer/api/src/infrastructure/GatewayRpcError';
import {GatewayService} from '@fluxer/api/src/infrastructure/GatewayService';
import {createGatewayRpcMethodErrorHandler} from '@fluxer/api/src/test/msw/handlers/GatewayRpcHandlers';
import {server} from '@fluxer/api/src/test/msw/server';
import {MockGatewayRpcTransport} from '@fluxer/api/src/test/mocks/MockGatewayRpcTransport';
import {CallAlreadyExistsError} from '@fluxer/errors/src/domains/channel/CallAlreadyExistsError';
import {InvalidChannelTypeForCallError} from '@fluxer/errors/src/domains/channel/InvalidChannelTypeForCallError';
import {NoActiveCallError} from '@fluxer/errors/src/domains/channel/NoActiveCallError';
@@ -32,34 +32,30 @@ import {MissingPermissionsError} from '@fluxer/errors/src/domains/core/MissingPe
import {ServiceUnavailableError} from '@fluxer/errors/src/domains/core/ServiceUnavailableError';
import {UnknownGuildError} from '@fluxer/errors/src/domains/guild/UnknownGuildError';
import {UserNotInVoiceError} from '@fluxer/errors/src/domains/user/UserNotInVoiceError';
import {afterAll, afterEach, beforeAll, beforeEach, describe, expect, it} from 'vitest';
import {afterEach, beforeEach, describe, expect, it} from 'vitest';
describe('GatewayRpcService Error Handling', () => {
const TEST_GUILD_ID = createGuildID(123456789n);
const TEST_USER_ID = createUserID(987654321n);
const TEST_CHANNEL_ID = createChannelID(111222333n);
let mockTransport: MockGatewayRpcTransport;
let gatewayService: GatewayService;
beforeAll(() => {
server.listen({onUnhandledRequest: 'error'});
});
afterAll(() => {
server.close();
});
beforeEach(() => {
mockTransport = new MockGatewayRpcTransport();
GatewayRpcClient.createForTests(mockTransport);
gatewayService = new GatewayService();
});
afterEach(() => {
server.resetHandlers();
afterEach(async () => {
gatewayService.destroy();
await GatewayRpcClient.resetForTests();
mockTransport.reset();
});
it('transforms guild_not_found RPC error to UnknownGuildError', async () => {
server.use(createGatewayRpcMethodErrorHandler('guild.get_data', GatewayRpcMethodErrorCodes.GUILD_NOT_FOUND));
mockTransport.setMethodError('guild.get_data', GatewayRpcMethodErrorCodes.GUILD_NOT_FOUND);
await expect(
gatewayService.getGuildData({
@@ -70,7 +66,7 @@ describe('GatewayRpcService Error Handling', () => {
});
it('transforms forbidden RPC error to MissingPermissionsError', async () => {
server.use(createGatewayRpcMethodErrorHandler('guild.get_data', GatewayRpcMethodErrorCodes.FORBIDDEN));
mockTransport.setMethodError('guild.get_data', GatewayRpcMethodErrorCodes.FORBIDDEN);
await expect(
gatewayService.getGuildData({
@@ -81,13 +77,13 @@ describe('GatewayRpcService Error Handling', () => {
});
it('transforms guild_not_found RPC error to UnknownGuildError for non-batched calls', async () => {
server.use(createGatewayRpcMethodErrorHandler('guild.get_counts', GatewayRpcMethodErrorCodes.GUILD_NOT_FOUND));
mockTransport.setMethodError('guild.get_counts', GatewayRpcMethodErrorCodes.GUILD_NOT_FOUND);
await expect(gatewayService.getGuildCounts(TEST_GUILD_ID)).rejects.toThrow(UnknownGuildError);
});
it('transforms call_already_exists RPC error to CallAlreadyExistsError', async () => {
server.use(createGatewayRpcMethodErrorHandler('call.create', GatewayRpcMethodErrorCodes.CALL_ALREADY_EXISTS));
mockTransport.setMethodError('call.create', GatewayRpcMethodErrorCodes.CALL_ALREADY_EXISTS);
await expect(gatewayService.createCall(TEST_CHANNEL_ID, '123', 'us-east', [], [])).rejects.toThrow(
CallAlreadyExistsError,
@@ -95,27 +91,25 @@ describe('GatewayRpcService Error Handling', () => {
});
it('transforms call_not_found RPC error to NoActiveCallError', async () => {
server.use(createGatewayRpcMethodErrorHandler('call.delete', GatewayRpcMethodErrorCodes.CALL_NOT_FOUND));
mockTransport.setMethodError('call.delete', GatewayRpcMethodErrorCodes.CALL_NOT_FOUND);
await expect(gatewayService.deleteCall(TEST_CHANNEL_ID)).rejects.toThrow(NoActiveCallError);
});
it('transforms channel_not_found RPC error to UnknownChannelError', async () => {
server.use(createGatewayRpcMethodErrorHandler('call.get', GatewayRpcMethodErrorCodes.CHANNEL_NOT_FOUND));
mockTransport.setMethodError('call.get', GatewayRpcMethodErrorCodes.CHANNEL_NOT_FOUND);
await expect(gatewayService.getCall(TEST_CHANNEL_ID)).rejects.toThrow(UnknownChannelError);
});
it('transforms channel_not_voice RPC error to InvalidChannelTypeForCallError', async () => {
server.use(createGatewayRpcMethodErrorHandler('call.get', GatewayRpcMethodErrorCodes.CHANNEL_NOT_VOICE));
mockTransport.setMethodError('call.get', GatewayRpcMethodErrorCodes.CHANNEL_NOT_VOICE);
await expect(gatewayService.getCall(TEST_CHANNEL_ID)).rejects.toThrow(InvalidChannelTypeForCallError);
});
it('transforms user_not_in_voice RPC error to UserNotInVoiceError', async () => {
server.use(
createGatewayRpcMethodErrorHandler('guild.update_member_voice', GatewayRpcMethodErrorCodes.USER_NOT_IN_VOICE),
);
mockTransport.setMethodError('guild.update_member_voice', GatewayRpcMethodErrorCodes.USER_NOT_IN_VOICE);
await expect(
gatewayService.updateMemberVoice({
@@ -128,7 +122,7 @@ describe('GatewayRpcService Error Handling', () => {
});
it('transforms timeout RPC error to GatewayTimeoutError', async () => {
server.use(createGatewayRpcMethodErrorHandler('guild.get_data', GatewayRpcMethodErrorCodes.TIMEOUT));
mockTransport.setMethodError('guild.get_data', GatewayRpcMethodErrorCodes.TIMEOUT);
await expect(
gatewayService.getGuildData({
@@ -139,7 +133,7 @@ describe('GatewayRpcService Error Handling', () => {
});
it('does not open circuit breaker for mapped gateway business errors', async () => {
server.use(createGatewayRpcMethodErrorHandler('guild.get_data', GatewayRpcMethodErrorCodes.GUILD_NOT_FOUND));
mockTransport.setMethodError('guild.get_data', GatewayRpcMethodErrorCodes.GUILD_NOT_FOUND);
for (let attempt = 0; attempt < 6; attempt += 1) {
await expect(
@@ -152,7 +146,7 @@ describe('GatewayRpcService Error Handling', () => {
});
it('opens circuit breaker for repeated gateway internal errors', async () => {
server.use(createGatewayRpcMethodErrorHandler('guild.get_data', GatewayRpcMethodErrorCodes.INTERNAL_ERROR));
mockTransport.setMethodError('guild.get_data', GatewayRpcMethodErrorCodes.INTERNAL_ERROR);
for (let attempt = 0; attempt < 5; attempt += 1) {
await expect(
@@ -170,4 +164,47 @@ describe('GatewayRpcService Error Handling', () => {
}),
).rejects.toThrow(ServiceUnavailableError);
});
it('parses both member_count and online_count from getDiscoveryGuildCounts', async () => {
const guildIdA = createGuildID(100n);
const guildIdB = createGuildID(200n);
mockTransport.setMethodResult('guild.get_online_counts_batch', {
online_counts: [
{guild_id: '100', member_count: 500, online_count: 42},
{guild_id: '200', member_count: 1200, online_count: 300},
],
});
const counts = await gatewayService.getDiscoveryGuildCounts([guildIdA, guildIdB]);
expect(counts.size).toBe(2);
expect(counts.get(guildIdA)).toEqual({memberCount: 500, onlineCount: 42});
expect(counts.get(guildIdB)).toEqual({memberCount: 1200, onlineCount: 300});
expect(mockTransport.call).toHaveBeenCalledWith('guild.get_online_counts_batch', {
guild_ids: ['100', '200'],
});
});
it('returns empty map from getDiscoveryGuildCounts when response has no entries', async () => {
mockTransport.setMethodResult('guild.get_online_counts_batch', {
online_counts: [],
});
const counts = await gatewayService.getDiscoveryGuildCounts([createGuildID(999n)]);
expect(counts.size).toBe(0);
});
it('getDiscoveryOnlineCounts still works with member_count present in response', async () => {
mockTransport.setMethodResult('guild.get_online_counts_batch', {
online_counts: [{guild_id: '100', member_count: 500, online_count: 42}],
});
const counts = await gatewayService.getDiscoveryOnlineCounts([createGuildID(100n)]);
expect(counts.size).toBe(1);
expect(counts.get(100n as GuildID)).toBe(42);
});
});

View File

@@ -1,48 +0,0 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {decodeGatewayTcpFrames, encodeGatewayTcpFrame} from '@fluxer/api/src/infrastructure/GatewayTcpFrameCodec';
import {describe, expect, test} from 'vitest';
describe('GatewayTcpFrameCodec', () => {
	test('encodes and decodes a single frame', () => {
		const frame = {type: 'ping'};
		const result = decodeGatewayTcpFrames(encodeGatewayTcpFrame(frame));
		expect(result.frames).toEqual([frame]);
		expect(result.remainder.length).toBe(0);
	});
	test('decodes multiple frames with trailing partial frame', () => {
		const requestFrame = {type: 'request', id: '1', method: 'process.node_stats', params: {}};
		const pongFrame = {type: 'pong'};
		// A truncated frame ("5\n{\"ty") must be preserved as the remainder, not decoded.
		const trailing = Buffer.from('5\n{"ty', 'utf8');
		const wire = Buffer.concat([encodeGatewayTcpFrame(requestFrame), encodeGatewayTcpFrame(pongFrame), trailing]);
		const result = decodeGatewayTcpFrames(wire);
		expect(result.frames).toEqual([requestFrame, pongFrame]);
		expect(result.remainder.equals(trailing)).toBe(true);
	});
	test('throws on invalid frame length', () => {
		// Non-numeric length prefix must be rejected rather than silently skipped.
		const malformed = Buffer.from('x\n{}', 'utf8');
		expect(() => decodeGatewayTcpFrames(malformed)).toThrow('Invalid Gateway TCP frame length');
	});
});

View File

@@ -1,309 +0,0 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {createServer, type Server, type Socket} from 'node:net';
import type {ILogger} from '@fluxer/api/src/ILogger';
import {decodeGatewayTcpFrames, encodeGatewayTcpFrame} from '@fluxer/api/src/infrastructure/GatewayTcpFrameCodec';
import {GatewayTcpRpcTransport} from '@fluxer/api/src/infrastructure/GatewayTcpRpcTransport';
import {afterEach, beforeEach, describe, expect, test, vi} from 'vitest';
// Handle to an ephemeral loopback TCP server used as the remote end of the
// transport under test, plus the OS-assigned port it is listening on.
interface TestTcpServer {
	server: Server;
	port: number;
}
// Per-connection state handed to the test frame handler: the socket itself and
// accessors for the rolling receive buffer holding bytes of a not-yet-complete frame.
interface ConnectionContext {
	socket: Socket;
	getBuffer: () => Buffer<ArrayBufferLike>;
	setBuffer: (buffer: Buffer<ArrayBufferLike>) => void;
}
/**
 * Builds an ILogger whose methods are inert vi.fn() mocks; child() recursively
 * yields another no-op logger so nested logger use never touches real output.
 */
function createNoopLogger(): ILogger {
	const logger: ILogger = {
		trace: vi.fn(),
		debug: vi.fn(),
		info: vi.fn(),
		warn: vi.fn(),
		error: vi.fn(),
		fatal: vi.fn(),
		child: vi.fn(() => createNoopLogger()),
	};
	return logger;
}
/**
 * Spins up a loopback TCP server that decodes length-prefixed gateway frames
 * from each connection and forwards every complete frame (with its connection
 * context) to `handler`. Resolves once the server is listening on an
 * OS-assigned port.
 */
async function startTestServer(
	handler: (context: ConnectionContext, frame: Record<string, unknown>) => void,
): Promise<TestTcpServer> {
	const server = createServer((socket) => {
		// Rolling buffer of bytes belonging to a frame that has not fully arrived yet.
		let pending: Buffer<ArrayBufferLike> = Buffer.alloc(0);
		const context: ConnectionContext = {
			socket,
			getBuffer: () => pending,
			setBuffer: (nextBuffer) => {
				pending = nextBuffer;
			},
		};
		socket.on('data', (chunk: Buffer<ArrayBufferLike> | string) => {
			const incoming = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk, 'utf8');
			const {frames, remainder} = decodeGatewayTcpFrames(Buffer.concat([context.getBuffer(), incoming]));
			context.setBuffer(remainder);
			for (const frame of frames) {
				handler(context, frame as Record<string, unknown>);
			}
		});
	});
	await new Promise<void>((resolve) => {
		server.listen(0, '127.0.0.1', () => resolve());
	});
	const address = server.address();
	if (address === null || typeof address === 'string') {
		throw new Error('Failed to get TCP server address');
	}
	return {server, port: address.port};
}
/**
 * Closes the server and resolves once the close callback fires.
 * Any error passed to the close callback is intentionally ignored.
 */
function stopTestServer(server: Server): Promise<void> {
	return new Promise<void>((resolve) => {
		server.close(() => {
			resolve();
		});
	});
}
// Serializes a frame with the gateway TCP length-prefix framing and writes it to the socket.
function sendFrame(socket: Socket, frame: Record<string, unknown>): void {
	socket.write(encodeGatewayTcpFrame(frame));
}
describe('GatewayTcpRpcTransport', () => {
	let tcpServer: TestTcpServer | null = null;
	let transport: GatewayTcpRpcTransport | null = null;
	// Tear down in this order: transport first (stops reconnect attempts),
	// then the TCP server, so no test leaks sockets into the next one.
	afterEach(async () => {
		if (transport) {
			await transport.destroy();
			transport = null;
		}
		if (tcpServer) {
			await stopTestServer(tcpServer.server);
			tcpServer = null;
		}
	});
	beforeEach(() => {
		// These tests rely on real timers (setTimeout-driven server behavior).
		vi.useRealTimers();
	});
	test('multiplexes concurrent requests and matches out-of-order responses', async () => {
		let requestOneId: string | null = null;
		let requestTwoId: string | null = null;
		tcpServer = await startTestServer((context, frame) => {
			if (frame.type === 'hello') {
				sendFrame(context.socket, {
					type: 'hello_ack',
					protocol: 'fluxer.rpc.tcp.v1',
					ping_interval_ms: 60000,
				});
				return;
			}
			if (frame.type === 'request') {
				const requestId = String(frame.id);
				const method = String(frame.method);
				if (method === 'first') {
					requestOneId = requestId;
				}
				if (method === 'second') {
					requestTwoId = requestId;
				}
				// Once both requests arrived, answer them in reverse order to
				// prove responses are matched by id, not by arrival order.
				if (requestOneId && requestTwoId) {
					sendFrame(context.socket, {
						type: 'response',
						id: requestTwoId,
						ok: true,
						result: 'second-result',
					});
					sendFrame(context.socket, {
						type: 'response',
						id: requestOneId,
						ok: true,
						result: 'first-result',
					});
				}
			}
		});
		transport = new GatewayTcpRpcTransport({
			host: '127.0.0.1',
			port: tcpServer.port,
			authorization: 'Bearer test-secret',
			connectTimeoutMs: 300,
			requestTimeoutMs: 2000,
			defaultPingIntervalMs: 60000,
			logger: createNoopLogger(),
		});
		const firstPromise = transport.call('first', {});
		const secondPromise = transport.call('second', {});
		const [firstResult, secondResult] = await Promise.all([firstPromise, secondPromise]);
		expect(firstResult).toBe('first-result');
		expect(secondResult).toBe('second-result');
	});
	test('reconnects automatically after server-side disconnect', async () => {
		let connectionCount = 0;
		let firstRequestSeen = false;
		tcpServer = await startTestServer((context, frame) => {
			if (frame.type === 'hello') {
				// Each hello marks a fresh connection; used to assert a reconnect happened.
				connectionCount += 1;
				sendFrame(context.socket, {
					type: 'hello_ack',
					protocol: 'fluxer.rpc.tcp.v1',
					ping_interval_ms: 60000,
				});
				return;
			}
			if (frame.type === 'request') {
				const requestId = String(frame.id);
				if (!firstRequestSeen) {
					firstRequestSeen = true;
					sendFrame(context.socket, {
						type: 'response',
						id: requestId,
						ok: true,
						result: 'first-response',
					});
					// Drop the connection shortly after responding to force a reconnect.
					setTimeout(() => {
						context.socket.destroy();
					}, 5);
					return;
				}
				sendFrame(context.socket, {
					type: 'response',
					id: requestId,
					ok: true,
					result: 'second-response',
				});
			}
		});
		transport = new GatewayTcpRpcTransport({
			host: '127.0.0.1',
			port: tcpServer.port,
			authorization: 'Bearer test-secret',
			connectTimeoutMs: 300,
			requestTimeoutMs: 2000,
			defaultPingIntervalMs: 60000,
			logger: createNoopLogger(),
		});
		const firstResult = await transport.call('first', {});
		expect(firstResult).toBe('first-response');
		// Give the transport a moment to notice the disconnect and reconnect.
		await new Promise((resolve) => setTimeout(resolve, 20));
		const secondResult = await transport.call('second', {});
		expect(secondResult).toBe('second-response');
		expect(connectionCount).toBeGreaterThanOrEqual(2);
	});
	test('rejects new requests when pending queue is full', async () => {
		tcpServer = await startTestServer((context, frame) => {
			if (frame.type === 'hello') {
				sendFrame(context.socket, {
					type: 'hello_ack',
					protocol: 'fluxer.rpc.tcp.v1',
					ping_interval_ms: 60000,
				});
				return;
			}
			if (frame.type !== 'request') {
				return;
			}
			const requestId = String(frame.id);
			const method = String(frame.method);
			if (method === 'first') {
				// Delay the response so the request occupies the (size-1) pending queue.
				setTimeout(() => {
					sendFrame(context.socket, {
						type: 'response',
						id: requestId,
						ok: true,
						result: 'first-response',
					});
				}, 40);
				return;
			}
			sendFrame(context.socket, {
				type: 'response',
				id: requestId,
				ok: true,
				result: 'unexpected-response',
			});
		});
		transport = new GatewayTcpRpcTransport({
			host: '127.0.0.1',
			port: tcpServer.port,
			authorization: 'Bearer test-secret',
			connectTimeoutMs: 300,
			requestTimeoutMs: 2000,
			defaultPingIntervalMs: 60000,
			maxPendingRequests: 1,
			logger: createNoopLogger(),
		});
		const firstPromise = transport.call('first', {});
		await expect(transport.call('second', {})).rejects.toThrow('Gateway TCP request queue is full');
		await expect(firstPromise).resolves.toBe('first-response');
	});
	test('closes the connection when the input buffer exceeds limit', async () => {
		tcpServer = await startTestServer((context, frame) => {
			if (frame.type === 'hello') {
				sendFrame(context.socket, {
					type: 'hello_ack',
					protocol: 'fluxer.rpc.tcp.v1',
					ping_interval_ms: 60000,
				});
				return;
			}
			if (frame.type === 'request') {
				// Reply with raw digits (a huge unterminated length prefix) to
				// overflow the client's 64-byte input buffer limit below.
				context.socket.write(Buffer.from('9999999999999999999999999999999999999999999999999999999999999999999999'));
			}
		});
		transport = new GatewayTcpRpcTransport({
			host: '127.0.0.1',
			port: tcpServer.port,
			authorization: 'Bearer test-secret',
			connectTimeoutMs: 300,
			requestTimeoutMs: 2000,
			defaultPingIntervalMs: 60000,
			maxBufferBytes: 64,
			logger: createNoopLogger(),
		});
		await expect(transport.call('overflow', {})).rejects.toThrow('Gateway TCP input buffer exceeded maximum size');
	});
});

View File

@@ -55,8 +55,8 @@ function mapDiscoveryRowToResponse(row: GuildDiscoveryRow) {
description: row.description,
category_type: row.category_type,
applied_at: row.applied_at.toISOString(),
reviewed_at: row.reviewed_at?.toISOString() ?? null,
review_reason: row.review_reason ?? null,
reviewed_at: row.removed_at?.toISOString() ?? row.reviewed_at?.toISOString() ?? null,
review_reason: row.removal_reason ?? row.review_reason ?? null,
};
}

View File

@@ -99,18 +99,17 @@ export class GuildContentRepository extends IGuildContentRepository {
const emojiId = data.emoji_id;
const result = await executeVersionedUpdate<GuildEmojiRow, 'guild_id' | 'emoji_id'>(
async () => {
if (oldData !== undefined) return oldData;
return await fetchOne<GuildEmojiRow>(FETCH_GUILD_EMOJI_BY_ID_QUERY, {
async () =>
fetchOne<GuildEmojiRow>(FETCH_GUILD_EMOJI_BY_ID_QUERY, {
guild_id: guildId,
emoji_id: emojiId,
});
},
}),
(current) => ({
pk: {guild_id: guildId, emoji_id: emojiId},
patch: buildPatchFromData(data, current, GUILD_EMOJI_COLUMNS, ['guild_id', 'emoji_id']),
}),
GuildEmojis,
{initialData: oldData},
);
await fetchOne(GuildEmojisByEmojiId.insert(data));
@@ -164,18 +163,17 @@ export class GuildContentRepository extends IGuildContentRepository {
const stickerId = data.sticker_id;
const result = await executeVersionedUpdate<GuildStickerRow, 'guild_id' | 'sticker_id'>(
async () => {
if (oldData !== undefined) return oldData;
return await fetchOne<GuildStickerRow>(FETCH_GUILD_STICKER_BY_ID_QUERY, {
async () =>
fetchOne<GuildStickerRow>(FETCH_GUILD_STICKER_BY_ID_QUERY, {
guild_id: guildId,
sticker_id: stickerId,
});
},
}),
(current) => ({
pk: {guild_id: guildId, sticker_id: stickerId},
patch: buildPatchFromData(data, current, GUILD_STICKER_COLUMNS, ['guild_id', 'sticker_id']),
}),
GuildStickers,
{initialData: oldData},
);
await fetchOne(GuildStickersByStickerId.insert(data));

View File

@@ -118,15 +118,13 @@ export class GuildDataRepository extends IGuildDataRepository {
const guildId = data.guild_id;
const result = await executeVersionedUpdate<GuildRow, 'guild_id'>(
async () => {
if (oldData !== undefined) return oldData;
return await fetchOne<GuildRow>(FETCH_GUILD_BY_ID_QUERY, {guild_id: guildId});
},
async () => fetchOne<GuildRow>(FETCH_GUILD_BY_ID_QUERY, {guild_id: guildId}),
(current) => ({
pk: {guild_id: guildId},
patch: buildPatchFromData(data, current, GUILD_COLUMNS, ['guild_id']),
}),
Guilds,
{initialData: oldData},
);
return new Guild({...data, version: result.finalVersion ?? 0});

View File

@@ -75,18 +75,17 @@ export class GuildMemberRepository extends IGuildMemberRepository {
const userId = data.user_id;
const result = await executeVersionedUpdate<GuildMemberRow, 'guild_id' | 'user_id'>(
async () => {
if (oldData !== undefined) return oldData;
return await fetchOne<GuildMemberRow>(FETCH_GUILD_MEMBER_BY_GUILD_AND_USER_ID_QUERY, {
async () =>
fetchOne<GuildMemberRow>(FETCH_GUILD_MEMBER_BY_GUILD_AND_USER_ID_QUERY, {
guild_id: guildId,
user_id: userId,
});
},
}),
(current) => ({
pk: {guild_id: guildId, user_id: userId},
patch: buildPatchFromData(data, current, GUILD_MEMBER_COLUMNS, ['guild_id', 'user_id']),
}),
GuildMembers,
{initialData: oldData},
);
await fetchOne(

View File

@@ -82,18 +82,17 @@ export class GuildRoleRepository extends IGuildRoleRepository {
const roleId = data.role_id;
const result = await executeVersionedUpdate<GuildRoleRow, 'guild_id' | 'role_id'>(
async () => {
if (oldData !== undefined) return oldData;
return await fetchOne<GuildRoleRow>(FETCH_GUILD_ROLE_BY_ID_QUERY, {
async () =>
fetchOne<GuildRoleRow>(FETCH_GUILD_ROLE_BY_ID_QUERY, {
guild_id: guildId,
role_id: roleId,
});
},
}),
(current) => ({
pk: {guild_id: guildId, role_id: roleId},
patch: buildPatchFromData(data, current, GUILD_ROLE_COLUMNS, ['guild_id', 'role_id']),
}),
GuildRoles,
{initialData: oldData},
);
return new GuildRole({...data, version: result.finalVersion ?? 1});

View File

@@ -24,6 +24,7 @@ import {mapGuildToGuildResponse} from '@fluxer/api/src/guild/GuildModel';
import type {IGuildDiscoveryRepository} from '@fluxer/api/src/guild/repositories/GuildDiscoveryRepository';
import type {IGuildRepositoryAggregate} from '@fluxer/api/src/guild/repositories/IGuildRepositoryAggregate';
import type {IGatewayService} from '@fluxer/api/src/infrastructure/IGatewayService';
import {Logger} from '@fluxer/api/src/Logger';
import type {IGuildSearchService} from '@fluxer/api/src/search/IGuildSearchService';
import {
DiscoveryApplicationStatus,
@@ -37,7 +38,7 @@ import {DiscoveryApplicationNotFoundError} from '@fluxer/errors/src/domains/disc
import {DiscoveryInsufficientMembersError} from '@fluxer/errors/src/domains/discovery/DiscoveryInsufficientMembersError';
import {DiscoveryInvalidCategoryError} from '@fluxer/errors/src/domains/discovery/DiscoveryInvalidCategoryError';
import {DiscoveryNotDiscoverableError} from '@fluxer/errors/src/domains/discovery/DiscoveryNotDiscoverableError';
import type {GuildSearchFilters} from '@fluxer/schema/src/contracts/search/SearchDocumentTypes';
import type {GuildSearchFilters} from '@fluxer/schema/src/contracts/search/SearchDocumentTypes.jsx';
import type {DiscoveryApplicationPatchRequest} from '@fluxer/schema/src/domains/guild/GuildDiscoverySchemas';
const VALID_CATEGORY_TYPES = new Set<number>(Object.values(DiscoveryCategories));
@@ -148,9 +149,6 @@ export class GuildDiscoveryService extends IGuildDiscoveryService {
removal_reason: null,
};
if (existing) {
await this.discoveryRepository.deleteByGuildId(guildId, existing.status, existing.applied_at);
}
await this.discoveryRepository.upsert(row);
return row;
@@ -337,16 +335,14 @@ export class GuildDiscoveryService extends IGuildDiscoveryService {
limit: number;
offset: number;
}): Promise<{guilds: Array<DiscoveryGuildResult>; total: number}> {
if (this.guildSearchService) {
const sortByMap: Record<string, GuildSearchFilters['sortBy']> = {
member_count: 'memberCount',
online_count: 'onlineCount',
};
let guilds: Array<DiscoveryGuildResult>;
let total: number;
if (this.guildSearchService) {
const filters: GuildSearchFilters = {
isDiscoverable: true,
discoveryCategory: params.categoryId,
sortBy: sortByMap[params.sortBy ?? ''] ?? 'relevance',
sortBy: 'relevance',
sortOrder: 'desc',
};
@@ -355,54 +351,75 @@ export class GuildDiscoveryService extends IGuildDiscoveryService {
offset: params.offset,
});
const guilds: Array<DiscoveryGuildResult> = results.hits.map((hit) => ({
guilds = results.hits.map((hit) => ({
id: hit.id,
name: hit.name,
icon: hit.iconHash,
description: hit.discoveryDescription,
category_type: hit.discoveryCategory ?? 0,
member_count: hit.memberCount,
online_count: hit.onlineCount,
member_count: 0,
online_count: 0,
features: hit.features,
verification_level: hit.verificationLevel,
}));
return {guilds, total: results.total};
}
total = results.total;
} else {
const statusRows = await this.discoveryRepository.listByStatus(
DiscoveryApplicationStatus.APPROVED,
params.limit + params.offset,
);
const statusRows = await this.discoveryRepository.listByStatus(
DiscoveryApplicationStatus.APPROVED,
params.limit + params.offset,
);
const paginatedRows = statusRows.slice(params.offset, params.offset + params.limit);
guilds = [];
const paginatedRows = statusRows.slice(params.offset, params.offset + params.limit);
const guilds: Array<DiscoveryGuildResult> = [];
for (const statusRow of paginatedRows) {
const discoveryRow = await this.discoveryRepository.findByGuildId(statusRow.guild_id);
if (!discoveryRow) continue;
for (const statusRow of paginatedRows) {
const discoveryRow = await this.discoveryRepository.findByGuildId(statusRow.guild_id);
if (!discoveryRow) continue;
if (params.categoryId !== undefined && discoveryRow.category_type !== params.categoryId) {
continue;
}
if (params.categoryId !== undefined && discoveryRow.category_type !== params.categoryId) {
continue;
const guild = await this.guildRepository.findUnique(statusRow.guild_id);
if (!guild) continue;
guilds.push({
id: statusRow.guild_id.toString(),
name: guild.name,
icon: guild.iconHash,
description: discoveryRow.description,
category_type: discoveryRow.category_type,
member_count: guild.memberCount,
online_count: 0,
features: Array.from(guild.features),
verification_level: guild.verificationLevel,
});
}
const guild = await this.guildRepository.findUnique(statusRow.guild_id);
if (!guild) continue;
guilds.push({
id: statusRow.guild_id.toString(),
name: guild.name,
icon: guild.iconHash,
description: discoveryRow.description,
category_type: discoveryRow.category_type,
member_count: guild.memberCount,
online_count: 0,
features: Array.from(guild.features),
verification_level: guild.verificationLevel,
});
total = statusRows.length;
}
return {guilds, total: statusRows.length};
if (guilds.length > 0) {
try {
const guildIds = guilds.map((g) => BigInt(g.id) as GuildID);
const freshCounts = await this.gatewayService.getDiscoveryGuildCounts(guildIds);
for (const guild of guilds) {
const counts = freshCounts.get(BigInt(guild.id) as GuildID);
if (counts) {
guild.member_count = counts.memberCount;
guild.online_count = counts.onlineCount;
}
}
} catch (error) {
Logger.warn(
{error: error instanceof Error ? error.message : String(error)},
'[discovery] Failed to fetch fresh guild counts from gateway, using stale values',
);
}
}
return {guilds, total};
}
private async addDiscoverableFeature(guildId: GuildID): Promise<void> {

View File

@@ -397,4 +397,45 @@ export class DirectS3StorageService implements IStorageService {
this.expirationManager.clearExpiration(params.bucket, key);
}
}
/**
 * Start a multipart upload for the given object and return the provider-issued upload id.
 * Thin delegation to the underlying S3 service; the upload id is required by
 * uploadPart/completeMultipartUpload/abortMultipartUpload.
 */
async createMultipartUpload(params: {
  bucket: string;
  key: string;
  contentType?: string;
}): Promise<{uploadId: string}> {
  const {bucket, key, contentType} = params;
  const {uploadId} = await this.s3Service.createMultipartUpload(bucket, key, {contentType});
  return {uploadId};
}
/**
 * Upload a single part of an in-progress multipart upload.
 * Converts the caller's Uint8Array into a Buffer for the S3 client and
 * returns the part's ETag, which must be echoed back on completion.
 */
async uploadPart(params: {
  bucket: string;
  key: string;
  uploadId: string;
  partNumber: number;
  body: Uint8Array;
}): Promise<{etag: string}> {
  const {bucket, key, uploadId, partNumber, body} = params;
  const uploaded = await this.s3Service.uploadPart(bucket, key, uploadId, partNumber, Buffer.from(body));
  return {etag: uploaded.etag};
}
/**
 * Finalize a multipart upload from its uploaded part list (part numbers + ETags).
 * Delegates directly to the underlying S3 service.
 */
async completeMultipartUpload(params: {
  bucket: string;
  key: string;
  uploadId: string;
  parts: Array<{partNumber: number; etag: string}>;
}): Promise<void> {
  const {bucket, key, uploadId, parts} = params;
  await this.s3Service.completeMultipartUpload(bucket, key, uploadId, parts);
}
/** Abort an in-progress multipart upload so the provider can reclaim its parts. */
async abortMultipartUpload(params: {bucket: string; key: string; uploadId: string}): Promise<void> {
  const {bucket, key, uploadId} = params;
  await this.s3Service.abortMultipartUpload(bucket, key, uploadId);
}
}

View File

@@ -19,40 +19,27 @@
import {Config} from '@fluxer/api/src/Config';
import {GatewayRpcMethodError, GatewayRpcMethodErrorCodes} from '@fluxer/api/src/infrastructure/GatewayRpcError';
import {GatewayTcpRpcTransport, GatewayTcpTransportError} from '@fluxer/api/src/infrastructure/GatewayTcpRpcTransport';
import type {IGatewayRpcTransport} from '@fluxer/api/src/infrastructure/IGatewayRpcTransport';
import type {CallData} from '@fluxer/api/src/infrastructure/IGatewayService';
import {NatsGatewayRpcTransport} from '@fluxer/api/src/infrastructure/NatsGatewayRpcTransport';
import {Logger} from '@fluxer/api/src/Logger';
import {NatsConnectionManager} from '@fluxer/nats/src/NatsConnectionManager';
import {recordCounter, recordHistogram} from '@fluxer/telemetry/src/Metrics';
import {ms} from 'itty-time';
interface GatewayRpcResponse {
result?: unknown;
error?: unknown;
}
const MAX_RETRY_ATTEMPTS = 3;
const TCP_FALLBACK_COOLDOWN_MS = ms('5 seconds');
const TCP_CONNECT_TIMEOUT_MS = 150;
const TCP_REQUEST_TIMEOUT_MS = ms('10 seconds');
const TCP_DEFAULT_PING_INTERVAL_MS = ms('15 seconds');
const TCP_MAX_PENDING_REQUESTS = 1024;
const TCP_MAX_BUFFER_BYTES = 2 * 1024 * 1024;
interface GatewayRpcClientOptions {
tcpTransport?: IGatewayRpcTransport;
transport?: IGatewayRpcTransport;
}
export class GatewayRpcClient {
private static instance: GatewayRpcClient | null = null;
private readonly httpEndpoint: string;
private readonly tcpTransport: IGatewayRpcTransport;
private tcpFallbackUntilMs = 0;
private readonly transport: IGatewayRpcTransport;
private constructor(options?: GatewayRpcClientOptions) {
this.httpEndpoint = `${Config.gateway.rpcEndpoint}/_rpc`;
this.tcpTransport = options?.tcpTransport ?? this.createGatewayTcpTransport();
this.transport = options?.transport ?? createNatsTransportSync();
}
static getInstance(): GatewayRpcClient {
@@ -62,29 +49,20 @@ export class GatewayRpcClient {
return GatewayRpcClient.instance;
}
static createForTests(transport: IGatewayRpcTransport): GatewayRpcClient {
const client = new GatewayRpcClient({transport});
GatewayRpcClient.instance = client;
return client;
}
static async resetForTests(): Promise<void> {
if (!GatewayRpcClient.instance) {
return;
}
await GatewayRpcClient.instance.tcpTransport.destroy();
await GatewayRpcClient.instance.transport.destroy();
GatewayRpcClient.instance = null;
}
private createGatewayTcpTransport(): GatewayTcpRpcTransport {
const endpointUrl = new URL(Config.gateway.rpcEndpoint);
return new GatewayTcpRpcTransport({
host: endpointUrl.hostname,
port: Config.gateway.rpcTcpPort,
authorization: `Bearer ${Config.gateway.rpcSecret}`,
connectTimeoutMs: TCP_CONNECT_TIMEOUT_MS,
requestTimeoutMs: TCP_REQUEST_TIMEOUT_MS,
defaultPingIntervalMs: TCP_DEFAULT_PING_INTERVAL_MS,
maxPendingRequests: TCP_MAX_PENDING_REQUESTS,
maxBufferBytes: TCP_MAX_BUFFER_BYTES,
logger: Logger,
});
}
async call<T>(method: string, params: Record<string, unknown>): Promise<T> {
Logger.debug(`[gateway-rpc] calling ${method}`);
const startTime = Date.now();
@@ -127,71 +105,8 @@ export class GatewayRpcClient {
}
private async executeCall<T>(method: string, params: Record<string, unknown>): Promise<T> {
if (Date.now() >= this.tcpFallbackUntilMs) {
try {
const result = await this.tcpTransport.call(method, params);
return result as T;
} catch (error) {
if (!(error instanceof GatewayTcpTransportError)) {
throw error;
}
this.tcpFallbackUntilMs = Date.now() + TCP_FALLBACK_COOLDOWN_MS;
Logger.warn({error}, '[gateway-rpc] TCP transport unavailable, falling back to HTTP');
}
}
return this.executeHttpCall(method, params);
}
private async executeHttpCall<T>(method: string, params: Record<string, unknown>): Promise<T> {
let response: Response;
try {
response = await fetch(this.httpEndpoint, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${Config.gateway.rpcSecret}`,
},
body: JSON.stringify({
method,
params,
}),
signal: AbortSignal.timeout(ms('10 seconds')),
});
} catch (error) {
if (error instanceof Error && error.name === 'TimeoutError') {
Logger.error({method}, '[gateway-rpc] request timed out after 10s');
} else {
Logger.error({error}, '[gateway-rpc] request failed to reach gateway');
}
throw error;
}
const text = await response.text();
let payload: GatewayRpcResponse = {};
if (text.length > 0) {
try {
payload = JSON.parse(text) as GatewayRpcResponse;
} catch (error) {
Logger.error({error, body: text, status: response.status}, '[gateway-rpc] failed to parse response body');
throw new Error('Malformed gateway RPC response');
}
}
if (!response.ok) {
if (typeof payload.error === 'string' && payload.error.length > 0) {
throw new GatewayRpcMethodError(payload.error);
}
throw new Error(`Gateway RPC request failed with status ${response.status}`);
}
if (!Object.hasOwn(payload, 'result')) {
Logger.error({status: response.status, body: payload}, '[gateway-rpc] response missing result value');
throw new Error('Malformed gateway RPC response');
}
return payload.result as T;
const result = await this.transport.call(method, params);
return result as T;
}
private calculateBackoff(attempt: number): number {
@@ -200,19 +115,29 @@ export class GatewayRpcClient {
}
private shouldRetry(error: unknown, method: string): boolean {
if (error instanceof GatewayTcpTransportError) {
if (this.isNatsConnectionError(error)) {
return true;
}
if (!(error instanceof Error)) {
return false;
}
if (error.name === 'TimeoutError') {
return true;
return this.isRetryableOverloadError(error, method);
}
private isNatsConnectionError(error: unknown): boolean {
if (!(error instanceof Error)) {
return false;
}
if (this.isRetryableOverloadError(error, method)) {
return true;
if (error instanceof GatewayRpcMethodError) {
return error.code === GatewayRpcMethodErrorCodes.NO_RESPONDERS;
}
return error.name === 'TypeError';
const message = error.message.toLowerCase();
return (
message.includes('connection closed') ||
message.includes('connection lost') ||
message.includes('reconnect') ||
message.includes('disconnect')
);
}
private isRetryableOverloadError(error: Error, method: string): boolean {
@@ -275,3 +200,15 @@ export class GatewayRpcClient {
return this.call('process.node_stats', {});
}
}
function createNatsTransportSync(): NatsGatewayRpcTransport {
const manager = new NatsConnectionManager({
url: Config.nats.coreUrl,
token: Config.nats.authToken || undefined,
name: 'fluxer-api-rpc',
});
void manager.connect().catch((error) => {
Logger.error({error}, '[gateway-rpc] Failed to establish NATS connection');
});
return new NatsGatewayRpcTransport(manager);
}

View File

@@ -21,6 +21,7 @@ export const GatewayRpcMethodErrorCodes = {
OVERLOADED: 'overloaded',
INTERNAL_ERROR: 'internal_error',
TIMEOUT: 'timeout',
NO_RESPONDERS: 'no_responders',
GUILD_NOT_FOUND: 'guild_not_found',
FORBIDDEN: 'forbidden',
CHANNEL_NOT_FOUND: 'channel_not_found',

View File

@@ -22,7 +22,6 @@ import {createChannelID, createRoleID, createUserID} from '@fluxer/api/src/Brand
import type {GatewayDispatchEvent} from '@fluxer/api/src/constants/Gateway';
import {GatewayRpcClient} from '@fluxer/api/src/infrastructure/GatewayRpcClient';
import {GatewayRpcMethodError, GatewayRpcMethodErrorCodes} from '@fluxer/api/src/infrastructure/GatewayRpcError';
import {GatewayTcpTransportError} from '@fluxer/api/src/infrastructure/GatewayTcpRpcTransport';
import type {CallData} from '@fluxer/api/src/infrastructure/IGatewayService';
import {Logger} from '@fluxer/api/src/Logger';
import {CallAlreadyExistsError} from '@fluxer/errors/src/domains/channel/CallAlreadyExistsError';
@@ -322,16 +321,15 @@ export class GatewayService {
if (error.code === GatewayRpcMethodErrorCodes.OVERLOADED) {
return new ServiceUnavailableError();
}
if (error.code === GatewayRpcMethodErrorCodes.NO_RESPONDERS) {
return new ServiceUnavailableError();
}
if (error.code === GatewayRpcMethodErrorCodes.INTERNAL_ERROR) {
return new BadGatewayError();
}
return new BadGatewayError();
}
if (error instanceof GatewayTcpTransportError) {
return new ServiceUnavailableError();
}
if (error instanceof Error && error.name === 'TimeoutError') {
return new GatewayTimeoutError();
}
@@ -1257,6 +1255,24 @@ export class GatewayService {
return counts;
}
/**
 * Fetch live member/online counts for a batch of guilds from the gateway.
 * Issues a single `guild.get_online_counts_batch` RPC (guild ids serialized as
 * strings) and returns a map keyed by GuildID. Guilds absent from the gateway's
 * reply are simply missing from the map.
 */
async getDiscoveryGuildCounts(
  guildIds: Array<GuildID>,
): Promise<Map<GuildID, {memberCount: number; onlineCount: number}>> {
  const response = await this.call<{
    online_counts: Array<{guild_id: string; member_count: number; online_count: number}>;
  }>('guild.get_online_counts_batch', {
    guild_ids: guildIds.map(String),
  });
  return new Map(
    response.online_counts.map((row) => [
      BigInt(row.guild_id) as GuildID,
      {memberCount: row.member_count, onlineCount: row.online_count},
    ]),
  );
}
async getNodeStats(): Promise<{
status: string;
sessions: number;

View File

@@ -1,77 +0,0 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
/** Hard cap on the JSON payload size of a single framed message (1 MiB). */
export const MAX_GATEWAY_TCP_FRAME_BYTES = 1024 * 1024;

/** Result of a decode pass: fully parsed frames plus any trailing partial bytes. */
export interface DecodedGatewayTcpFrames {
  frames: Array<unknown>;
  remainder: Buffer;
}

/**
 * Serialize one frame as `<payload-byte-length>\n<json-payload>`.
 * Throws when the JSON payload exceeds `maxFrameBytes`.
 */
export function encodeGatewayTcpFrame(frame: unknown, maxFrameBytes = MAX_GATEWAY_TCP_FRAME_BYTES): Buffer {
  const body = Buffer.from(JSON.stringify(frame), 'utf8');
  if (body.length > maxFrameBytes) {
    throw new Error('Gateway TCP frame exceeds maximum size');
  }
  const lengthHeader = Buffer.from(`${body.length}\n`, 'utf8');
  return Buffer.concat([lengthHeader, body]);
}

/**
 * Decode as many complete length-prefixed JSON frames as the buffer holds.
 * Stops at the first incomplete frame (missing newline or short payload) and
 * returns the unconsumed tail as `remainder` so the caller can re-append it.
 * Throws on an unparseable/out-of-range length header or malformed JSON payload.
 */
export function decodeGatewayTcpFrames(
  buffer: Buffer,
  maxFrameBytes = MAX_GATEWAY_TCP_FRAME_BYTES,
): DecodedGatewayTcpFrames {
  const frames: Array<unknown> = [];
  let cursor = 0;
  for (;;) {
    // Find the end of the length header; a missing newline means a partial frame.
    const headerEnd = buffer.indexOf(0x0a, cursor);
    if (headerEnd < 0) {
      break;
    }
    const declaredLength = Number.parseInt(buffer.subarray(cursor, headerEnd).toString('utf8'), 10);
    if (!Number.isFinite(declaredLength) || declaredLength < 0 || declaredLength > maxFrameBytes) {
      throw new Error('Invalid Gateway TCP frame length');
    }
    const bodyStart = headerEnd + 1;
    const bodyEnd = bodyStart + declaredLength;
    // Payload not fully buffered yet: leave header + partial payload in the remainder.
    if (bodyEnd > buffer.length) {
      break;
    }
    const payloadText = buffer.subarray(bodyStart, bodyEnd).toString('utf8');
    let parsed: unknown;
    try {
      parsed = JSON.parse(payloadText);
    } catch {
      throw new Error('Invalid Gateway TCP frame JSON');
    }
    frames.push(parsed);
    cursor = bodyEnd;
  }
  return {
    frames,
    remainder: buffer.subarray(cursor),
  };
}

View File

@@ -1,528 +0,0 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {createConnection, type Socket} from 'node:net';
import type {ILogger} from '@fluxer/api/src/ILogger';
import {GatewayRpcMethodError} from '@fluxer/api/src/infrastructure/GatewayRpcError';
import {
decodeGatewayTcpFrames,
encodeGatewayTcpFrame,
MAX_GATEWAY_TCP_FRAME_BYTES,
} from '@fluxer/api/src/infrastructure/GatewayTcpFrameCodec';
/** Client -> server: invoke one RPC method; `id` correlates the eventual response. */
interface GatewayTcpRequestFrame {
  type: 'request';
  id: string;
  method: string;
  params: Record<string, unknown>;
}
/** Client -> server: first frame on a connection, carrying protocol id and auth token. */
interface GatewayTcpHelloFrame {
  type: 'hello';
  protocol: 'fluxer.rpc.tcp.v1';
  authorization: string;
}
/** Server -> client: handshake acknowledgement; may negotiate in-flight limit and ping cadence. */
interface GatewayTcpHelloAckFrame {
  type: 'hello_ack';
  protocol: string;
  max_in_flight?: number;
  ping_interval_ms?: number;
}
/** Server -> client: reply to a request; `ok` selects between `result` and `error`. */
interface GatewayTcpResponseFrame {
  type: 'response';
  id: string;
  ok: boolean;
  result?: unknown;
  error?: unknown;
}
/** Server -> client: fatal connection-level error (not tied to a single request). */
interface GatewayTcpErrorFrame {
  type: 'error';
  error?: unknown;
}
/** Bidirectional keepalive frames. */
interface GatewayTcpPingFrame {
  type: 'ping' | 'pong';
}
type TcpBuffer = Buffer<ArrayBufferLike>;
/** Construction-time tuning for the TCP transport; limits guard against runaway peers. */
interface GatewayTcpRpcTransportOptions {
  host: string;
  port: number;
  authorization: string;
  connectTimeoutMs: number;
  requestTimeoutMs: number;
  defaultPingIntervalMs: number;
  maxFrameBytes?: number;
  maxPendingRequests?: number;
  maxBufferBytes?: number;
  logger: ILogger;
}
/** Bookkeeping for one in-flight request: promise settlers plus its timeout handle. */
interface PendingGatewayTcpRequest {
  resolve: (value: unknown) => void;
  reject: (error: Error) => void;
  timeout: NodeJS.Timeout;
}
// Protocol identifier exchanged during the hello handshake; must match on both sides.
const GATEWAY_TCP_PROTOCOL = 'fluxer.rpc.tcp.v1';
/** Type guard: true when `frame` is a hello_ack frame with a string protocol field. */
function isHelloAckFrame(frame: unknown): frame is GatewayTcpHelloAckFrame {
  if (typeof frame !== 'object' || frame === null) {
    return false;
  }
  const {type, protocol} = frame as {type?: unknown; protocol?: unknown};
  return type === 'hello_ack' && typeof protocol === 'string';
}
/** Type guard: true when `frame` is a response frame with string id and boolean ok. */
function isResponseFrame(frame: unknown): frame is GatewayTcpResponseFrame {
  if (typeof frame !== 'object' || frame === null) {
    return false;
  }
  const {type, id, ok} = frame as {type?: unknown; id?: unknown; ok?: unknown};
  return type === 'response' && typeof id === 'string' && typeof ok === 'boolean';
}
/** Type guard: true when `frame` is an error frame (discriminated only by its type tag). */
function isErrorFrame(frame: unknown): frame is GatewayTcpErrorFrame {
  if (typeof frame !== 'object' || frame === null) {
    return false;
  }
  return (frame as {type?: unknown}).type === 'error';
}
/**
 * Error class for transport-level failures (connect, framing, socket, timeout),
 * as distinct from GatewayRpcMethodError which represents RPC-level failures
 * returned by the gateway itself.
 */
export class GatewayTcpTransportError extends Error {
  constructor(message: string) {
    super(message);
    this.name = 'GatewayTcpTransportError';
  }
}
/**
 * RPC transport over a single raw TCP connection to the gateway, using
 * newline-length-prefixed JSON frames (see GatewayTcpFrameCodec).
 *
 * Lifecycle: lazily connects on first call(), performs a hello/hello_ack
 * handshake with bearer authorization, then multiplexes concurrent requests by
 * string id. Any protocol violation, socket error, buffer overflow, or request
 * timeout tears the whole connection down (closeSocket), rejecting every
 * pending request; the next call() reconnects.
 */
export class GatewayTcpRpcTransport {
  private readonly options: GatewayTcpRpcTransportOptions;
  private readonly maxFrameBytes: number;
  private readonly maxPendingRequests: number;
  private readonly maxBufferBytes: number;
  // Active socket, or null when disconnected.
  private socket: Socket | null = null;
  // In-flight connection attempt, shared so concurrent callers don't double-connect.
  private connectPromise: Promise<void> | null = null;
  // Accumulated unparsed bytes from the socket.
  private buffer: TcpBuffer = Buffer.alloc(0);
  private nextRequestId = 1;
  // Requests awaiting a response frame, keyed by request id.
  private pendingRequests = new Map<string, PendingGatewayTcpRequest>();
  // In-flight cap negotiated via hello_ack.max_in_flight (null = use local cap).
  private negotiatedMaxPendingRequests: number | null = null;
  private resolveHelloAck: (() => void) | null = null;
  private rejectHelloAck: ((error: Error) => void) | null = null;
  private pingTimer: NodeJS.Timeout | null = null;
  private destroyed = false;
  private helloAcknowledged = false;
  constructor(options: GatewayTcpRpcTransportOptions) {
    this.options = options;
    this.maxFrameBytes = options.maxFrameBytes ?? MAX_GATEWAY_TCP_FRAME_BYTES;
    this.maxPendingRequests = Math.max(1, options.maxPendingRequests ?? 1024);
    this.maxBufferBytes = Math.max(1, options.maxBufferBytes ?? this.maxFrameBytes * 2);
  }
  /**
   * Invoke `method` on the gateway and resolve with its result.
   * Connects/handshakes first if needed; rejects when the queue is full, the
   * send fails, the request times out, or the connection dies before a reply.
   */
  async call(method: string, params: Record<string, unknown>): Promise<unknown> {
    if (this.destroyed) {
      throw new GatewayTcpTransportError('Gateway TCP transport is destroyed');
    }
    await this.ensureConnected();
    const maxPendingRequests = this.getEffectiveMaxPendingRequests();
    if (this.pendingRequests.size >= maxPendingRequests) {
      throw new GatewayTcpTransportError(
        `Gateway TCP request queue is full (pending=${this.pendingRequests.size}, limit=${maxPendingRequests})`,
      );
    }
    const requestId = `${this.nextRequestId}`;
    this.nextRequestId += 1;
    const responsePromise = new Promise<unknown>((resolve, reject) => {
      const timeout = setTimeout(() => {
        this.pendingRequests.delete(requestId);
        const error = new GatewayTcpTransportError('Gateway TCP request timed out');
        reject(error);
        // A timed-out request poisons the connection: tear it down (rejecting siblings too).
        this.closeSocket(error);
      }, this.options.requestTimeoutMs);
      this.pendingRequests.set(requestId, {resolve, reject, timeout});
    });
    const frame: GatewayTcpRequestFrame = {
      type: 'request',
      id: requestId,
      method,
      params,
    };
    try {
      await this.sendFrame(frame);
    } catch (error) {
      // Unwind our pending entry so the timeout doesn't fire for an unsent request.
      const pending = this.pendingRequests.get(requestId);
      if (pending) {
        clearTimeout(pending.timeout);
        this.pendingRequests.delete(requestId);
        pending.reject(this.toTransportError(error, 'Gateway TCP send failed'));
      }
      throw this.toTransportError(error, 'Gateway TCP send failed');
    }
    return responsePromise;
  }
  /** Permanently shut down the transport; any in-progress connect is awaited (errors ignored). */
  async destroy(): Promise<void> {
    this.destroyed = true;
    this.closeSocket(new GatewayTcpTransportError('Gateway TCP transport destroyed'));
    if (this.connectPromise) {
      try {
        await this.connectPromise;
      } catch {}
    }
  }
  /** Ensure a handshaken connection exists, reusing any in-flight connect attempt. */
  private async ensureConnected(): Promise<void> {
    if (this.socket && !this.socket.destroyed && this.helloAcknowledged) {
      return;
    }
    if (this.connectPromise) {
      await this.connectPromise;
      return;
    }
    this.connectPromise = this.openConnection();
    try {
      await this.connectPromise;
    } finally {
      this.connectPromise = null;
    }
  }
  /** Dial the gateway, wire socket handlers, and complete the hello handshake. */
  private async openConnection(): Promise<void> {
    const socket = createConnection({
      host: this.options.host,
      port: this.options.port,
    });
    socket.setNoDelay(true);
    socket.setKeepAlive(true);
    socket.on('data', (data: Buffer) => {
      this.handleSocketData(socket, data);
    });
    socket.on('error', (error: Error) => {
      this.handleSocketError(socket, error);
    });
    socket.on('close', () => {
      this.handleSocketClose(socket);
    });
    await this.waitForSocketConnect(socket);
    this.socket = socket;
    this.buffer = Buffer.alloc(0);
    this.helloAcknowledged = false;
    // Resolved/rejected by handleHelloAckFrame / closeSocket respectively.
    const helloAckPromise = new Promise<void>((resolve, reject) => {
      this.resolveHelloAck = resolve;
      this.rejectHelloAck = reject;
    });
    const helloFrame: GatewayTcpHelloFrame = {
      type: 'hello',
      protocol: GATEWAY_TCP_PROTOCOL,
      authorization: this.options.authorization,
    };
    await this.sendFrame(helloFrame);
    await helloAckPromise;
  }
  /** Wait for the TCP connect event, with a connect timeout; settles exactly once. */
  private async waitForSocketConnect(socket: Socket): Promise<void> {
    await new Promise<void>((resolve, reject) => {
      let settled = false;
      const onConnect = () => {
        if (settled) {
          return;
        }
        settled = true;
        clearTimeout(timeoutHandle);
        socket.off('error', onError);
        resolve();
      };
      const onError = (error: Error) => {
        if (settled) {
          return;
        }
        settled = true;
        clearTimeout(timeoutHandle);
        socket.off('connect', onConnect);
        reject(new GatewayTcpTransportError(error.message));
      };
      const timeoutHandle = setTimeout(() => {
        if (settled) {
          return;
        }
        settled = true;
        socket.off('connect', onConnect);
        socket.off('error', onError);
        socket.destroy();
        reject(new GatewayTcpTransportError('Gateway TCP connect timeout'));
      }, this.options.connectTimeoutMs);
      socket.once('connect', onConnect);
      socket.once('error', onError);
    });
  }
  /**
   * Append incoming bytes to the parse buffer and dispatch every complete frame.
   * Enforces the buffer size cap and treats any decode failure as fatal.
   */
  private handleSocketData(socket: Socket, data: TcpBuffer): void {
    // Ignore data from a socket we've already replaced/closed.
    if (this.socket !== socket) {
      return;
    }
    const nextBufferSize = this.buffer.length + data.length;
    if (nextBufferSize > this.maxBufferBytes) {
      this.options.logger.warn(
        {
          bufferBytes: nextBufferSize,
          maxBufferBytes: this.maxBufferBytes,
        },
        '[gateway-rpc-tcp] input buffer limit exceeded',
      );
      this.closeSocket(
        new GatewayTcpTransportError(
          `Gateway TCP input buffer exceeded maximum size (buffer_bytes=${nextBufferSize}, max_buffer_bytes=${this.maxBufferBytes})`,
        ),
      );
      return;
    }
    this.buffer = Buffer.concat([this.buffer, data]);
    let decodedFrames: ReturnType<typeof decodeGatewayTcpFrames>;
    try {
      decodedFrames = decodeGatewayTcpFrames(this.buffer, this.maxFrameBytes);
    } catch (error) {
      const transportError = this.toTransportError(error, 'Gateway TCP protocol decode failed');
      this.closeSocket(transportError);
      return;
    }
    this.buffer = decodedFrames.remainder;
    for (const frame of decodedFrames.frames) {
      this.handleIncomingFrame(frame);
    }
  }
  /** Route one decoded frame by its type tag; unknown or malformed frames are fatal. */
  private handleIncomingFrame(frame: unknown): void {
    if (!frame || typeof frame !== 'object') {
      this.closeSocket(new GatewayTcpTransportError('Gateway TCP frame is not an object'));
      return;
    }
    const frameMap = frame as Record<string, unknown>;
    if (isHelloAckFrame(frameMap)) {
      this.handleHelloAckFrame(frameMap);
      return;
    }
    if (isResponseFrame(frameMap)) {
      this.handleResponseFrame(frameMap);
      return;
    }
    if (isErrorFrame(frameMap)) {
      const errorFrame = frameMap;
      const message =
        typeof errorFrame.error === 'string' && errorFrame.error.length > 0
          ? errorFrame.error
          : 'Gateway TCP returned an error frame';
      this.closeSocket(new GatewayTcpTransportError(message));
      return;
    }
    if (frameMap.type === 'ping') {
      // Answer server keepalives; a failed pong send is fatal.
      void this.sendFrame({type: 'pong'} satisfies GatewayTcpPingFrame).catch((error) => {
        this.closeSocket(this.toTransportError(error, 'Gateway TCP pong send failed'));
      });
      return;
    }
    if (frameMap.type === 'pong') {
      return;
    }
    this.closeSocket(new GatewayTcpTransportError('Gateway TCP received unknown frame type'));
  }
  /** Complete the handshake: verify protocol, adopt negotiated limits, start pinging. */
  private handleHelloAckFrame(frame: GatewayTcpHelloAckFrame): void {
    if (frame.protocol !== GATEWAY_TCP_PROTOCOL) {
      this.closeSocket(new GatewayTcpTransportError('Gateway TCP protocol mismatch'));
      return;
    }
    this.negotiatedMaxPendingRequests = this.resolveNegotiatedMaxPendingRequests(frame.max_in_flight);
    this.helloAcknowledged = true;
    this.startPingTimer(frame.ping_interval_ms);
    if (this.resolveHelloAck) {
      this.resolveHelloAck();
      this.resolveHelloAck = null;
      this.rejectHelloAck = null;
    }
  }
  /** Settle the pending request matching this response; late/unknown ids are dropped. */
  private handleResponseFrame(frame: GatewayTcpResponseFrame): void {
    if (typeof frame.id !== 'string') {
      this.closeSocket(new GatewayTcpTransportError('Gateway TCP response missing request id'));
      return;
    }
    const pending = this.pendingRequests.get(frame.id);
    if (!pending) {
      return;
    }
    clearTimeout(pending.timeout);
    this.pendingRequests.delete(frame.id);
    if (frame.ok) {
      pending.resolve(frame.result);
      return;
    }
    if (typeof frame.error === 'string' && frame.error.length > 0) {
      pending.reject(new GatewayRpcMethodError(frame.error));
      return;
    }
    pending.reject(new Error('Gateway RPC request failed'));
  }
  /**
   * Encode and write one frame, resolving once the kernel buffer accepts it
   * (write callback) or, on backpressure, once the socket drains.
   */
  private async sendFrame(frame: unknown): Promise<void> {
    const socket = this.socket;
    if (!socket || socket.destroyed) {
      throw new GatewayTcpTransportError('Gateway TCP socket is not connected');
    }
    const encoded = encodeGatewayTcpFrame(frame, this.maxFrameBytes);
    await new Promise<void>((resolve, reject) => {
      const onError = (error: Error) => {
        socket.off('drain', onDrain);
        reject(error);
      };
      const onDrain = () => {
        socket.off('error', onError);
        resolve();
      };
      socket.once('error', onError);
      // If write() returns true the callback path resolves; otherwise wait for 'drain'.
      const canWrite = socket.write(encoded, () => {
        if (canWrite) {
          socket.off('error', onError);
          resolve();
        }
      });
      if (!canWrite) {
        socket.once('drain', onDrain);
      }
    });
  }
  /** (Re)start the keepalive ping loop; a failed ping send tears the connection down. */
  private startPingTimer(pingIntervalMs?: number): void {
    this.stopPingTimer();
    const intervalMs = pingIntervalMs ?? this.options.defaultPingIntervalMs;
    this.pingTimer = setInterval(() => {
      void this.sendFrame({type: 'ping'} satisfies GatewayTcpPingFrame).catch((error) => {
        this.closeSocket(this.toTransportError(error, 'Gateway TCP ping failed'));
      });
    }, intervalMs);
  }
  private stopPingTimer(): void {
    if (!this.pingTimer) {
      return;
    }
    clearInterval(this.pingTimer);
    this.pingTimer = null;
  }
  /** Socket 'error' handler: log and close, but only if this is still the active socket. */
  private handleSocketError(socket: Socket, error: Error): void {
    if (this.socket !== socket) {
      return;
    }
    const transportError = this.toTransportError(error, 'Gateway TCP socket error');
    this.options.logger.warn({error: transportError}, '[gateway-rpc-tcp] socket error');
    this.closeSocket(transportError);
  }
  /** Socket 'close' handler: treat a remote close as a transport error. */
  private handleSocketClose(socket: Socket): void {
    if (this.socket !== socket) {
      return;
    }
    this.closeSocket(new GatewayTcpTransportError('Gateway TCP socket closed'));
  }
  /**
   * Tear down all connection state: stop pings, reset buffers/negotiation,
   * reject a pending handshake and every in-flight request with `error`,
   * and destroy the socket. Safe to call repeatedly.
   */
  private closeSocket(error: Error): void {
    this.stopPingTimer();
    this.helloAcknowledged = false;
    this.buffer = Buffer.alloc(0);
    this.negotiatedMaxPendingRequests = null;
    if (this.rejectHelloAck) {
      this.rejectHelloAck(error);
      this.resolveHelloAck = null;
      this.rejectHelloAck = null;
    }
    for (const pending of this.pendingRequests.values()) {
      clearTimeout(pending.timeout);
      pending.reject(error);
    }
    this.pendingRequests.clear();
    const socket = this.socket;
    this.socket = null;
    if (socket && !socket.destroyed) {
      socket.destroy();
    }
  }
  /** Normalize any thrown value into a GatewayTcpTransportError, preserving its message. */
  private toTransportError(error: unknown, fallbackMessage: string): GatewayTcpTransportError {
    if (error instanceof GatewayTcpTransportError) {
      return error;
    }
    if (error instanceof Error && error.message.length > 0) {
      return new GatewayTcpTransportError(error.message);
    }
    return new GatewayTcpTransportError(fallbackMessage);
  }
  /** Current in-flight cap: the negotiated value when present, else the local limit. */
  private getEffectiveMaxPendingRequests(): number {
    if (this.negotiatedMaxPendingRequests === null) {
      return this.maxPendingRequests;
    }
    return this.negotiatedMaxPendingRequests;
  }
  /** Validate the server's max_in_flight; never exceed our own configured limit. */
  private resolveNegotiatedMaxPendingRequests(maxInFlight: unknown): number | null {
    if (typeof maxInFlight !== 'number' || !Number.isInteger(maxInFlight) || maxInFlight <= 0) {
      return null;
    }
    return Math.min(this.maxPendingRequests, maxInFlight);
  }
}

View File

@@ -253,6 +253,10 @@ export abstract class IGatewayService {
abstract getDiscoveryOnlineCounts(guildIds: Array<GuildID>): Promise<Map<GuildID, number>>;
abstract getDiscoveryGuildCounts(
guildIds: Array<GuildID>,
): Promise<Map<GuildID, {memberCount: number; onlineCount: number}>>;
abstract getNodeStats(): Promise<{
status: string;
sessions: number;

View File

@@ -88,4 +88,23 @@ export interface IStorageService {
>;
deleteObjects(params: {bucket: string; objects: ReadonlyArray<{Key: string}>}): Promise<void>;
/** Begin a multipart upload; the returned uploadId ties together the part/complete/abort calls. */
createMultipartUpload(params: {bucket: string; key: string; contentType?: string}): Promise<{uploadId: string}>;
/** Upload one part (1-based partNumber); the returned etag must be passed to completeMultipartUpload. */
uploadPart(params: {
  bucket: string;
  key: string;
  uploadId: string;
  partNumber: number;
  body: Uint8Array;
}): Promise<{etag: string}>;
/** Finalize a multipart upload from the collected part numbers and etags. */
completeMultipartUpload(params: {
  bucket: string;
  key: string;
  uploadId: string;
  parts: Array<{partNumber: number; etag: string}>;
}): Promise<void>;
/** Cancel an in-progress multipart upload so stored parts can be reclaimed. */
abortMultipartUpload(params: {bucket: string; key: string; uploadId: string}): Promise<void>;
}

View File

@@ -0,0 +1,75 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {GatewayRpcMethodError, GatewayRpcMethodErrorCodes} from '@fluxer/api/src/infrastructure/GatewayRpcError';
import type {IGatewayRpcTransport} from '@fluxer/api/src/infrastructure/IGatewayRpcTransport';
import type {INatsConnectionManager} from '@fluxer/nats/src/INatsConnectionManager';
import {type Msg, StringCodec} from 'nats';
// Per-request timeout for the NATS request/reply round trip.
const NATS_REQUEST_TIMEOUT_MS = 10_000;
// Gateway RPC methods are published on `rpc.gateway.<method>`.
const NATS_SUBJECT_PREFIX = 'rpc.gateway.';

/** Wire shape of a gateway RPC reply carried over NATS. */
interface NatsRpcResponse {
  ok: boolean;
  result?: unknown;
  error?: string;
}

/**
 * Gateway RPC transport backed by NATS request/reply.
 * Each call publishes JSON-encoded params to `rpc.gateway.<method>` and awaits
 * a single reply; no-responder and timeout failures from the NATS client are
 * translated into GatewayRpcMethodError codes so callers can map them uniformly.
 */
export class NatsGatewayRpcTransport implements IGatewayRpcTransport {
  private readonly codec = StringCodec();

  constructor(private readonly connectionManager: INatsConnectionManager) {}

  async call(method: string, params: Record<string, unknown>): Promise<unknown> {
    const subject = NATS_SUBJECT_PREFIX + method;
    const requestBody = this.codec.encode(JSON.stringify(params));
    let reply: Msg;
    try {
      reply = await this.connectionManager.getConnection().request(subject, requestBody, {
        timeout: NATS_REQUEST_TIMEOUT_MS,
      });
    } catch (error) {
      throw this.mapNatsFailure(error);
    }
    const decoded = JSON.parse(this.codec.decode(reply.data)) as NatsRpcResponse;
    if (!decoded.ok) {
      // A reply with ok=false carries an RPC-level error code from the gateway.
      throw new GatewayRpcMethodError(decoded.error ?? GatewayRpcMethodErrorCodes.INTERNAL_ERROR);
    }
    return decoded.result;
  }

  async destroy(): Promise<void> {
    await this.connectionManager.drain();
  }

  /** Translate NATS client failures into GatewayRpcMethodError where recognizable. */
  private mapNatsFailure(error: unknown): unknown {
    if (error instanceof Error) {
      if (error.message === 'NO_RESPONDERS' || error.name === 'NoRespondersError') {
        return new GatewayRpcMethodError(GatewayRpcMethodErrorCodes.NO_RESPONDERS);
      }
      if (error.message === 'TIMEOUT' || error.name === 'TimeoutError') {
        return new GatewayRpcMethodError(GatewayRpcMethodErrorCodes.TIMEOUT);
      }
    }
    return error;
  }
}

View File

@@ -23,7 +23,10 @@ import path from 'node:path';
import {PassThrough, pipeline, Readable} from 'node:stream';
import {promisify} from 'node:util';
import {
AbortMultipartUploadCommand,
CompleteMultipartUploadCommand,
CopyObjectCommand,
CreateMultipartUploadCommand,
DeleteObjectCommand,
DeleteObjectsCommand,
GetObjectCommand,
@@ -34,6 +37,7 @@ import {
PutObjectCommand,
S3Client,
S3ServiceException,
UploadPartCommand,
} from '@aws-sdk/client-s3';
import {getSignedUrl} from '@aws-sdk/s3-request-presigner';
import {Config} from '@fluxer/api/src/Config';
@@ -203,7 +207,7 @@ export class StorageService implements IStorageService {
const command = new CopyObjectCommand({
Bucket: destinationBucket,
Key: destinationKey,
CopySource: `${sourceBucket}/${sourceKey}`,
CopySource: `${encodeURIComponent(sourceBucket)}/${sourceKey.split('/').map(encodeURIComponent).join('/')}`,
ContentType: newContentType,
MetadataDirective: newContentType ? 'REPLACE' : undefined,
});
@@ -365,4 +369,67 @@ export class StorageService implements IStorageService {
});
await this.getClient(params.bucket).send(command);
}
/**
 * Starts an S3 multipart upload for the given object and returns the upload id
 * that subsequent uploadPart/completeMultipartUpload calls must reference.
 */
async createMultipartUpload(params: {
  bucket: string;
  key: string;
  contentType?: string;
}): Promise<{uploadId: string}> {
  const {bucket, key, contentType} = params;
  const response = await this.getClient(bucket).send(
    new CreateMultipartUploadCommand({Bucket: bucket, Key: key, ContentType: contentType}),
  );
  // The SDK types UploadId as optional; a successful create must include it.
  assert(response.UploadId != null, 'Missing UploadId in CreateMultipartUpload response');
  return {uploadId: response.UploadId};
}
/**
 * Uploads one part of an in-progress multipart upload and returns the ETag S3
 * assigned to it (needed later by completeMultipartUpload).
 */
async uploadPart(params: {
  bucket: string;
  key: string;
  uploadId: string;
  partNumber: number;
  body: Uint8Array;
}): Promise<{etag: string}> {
  const {bucket, key, uploadId, partNumber, body} = params;
  const response = await this.getClient(bucket).send(
    new UploadPartCommand({
      Bucket: bucket,
      Key: key,
      UploadId: uploadId,
      PartNumber: partNumber,
      Body: body,
    }),
  );
  // S3 returns an ETag for every stored part; fail loudly if the SDK does not.
  assert(response.ETag != null, 'Missing ETag in UploadPart response');
  return {etag: response.ETag};
}
/**
 * Finalizes a multipart upload by telling S3 to assemble the listed parts
 * (identified by part number + ETag) into the final object.
 */
async completeMultipartUpload(params: {
  bucket: string;
  key: string;
  uploadId: string;
  parts: Array<{partNumber: number; etag: string}>;
}): Promise<void> {
  // Translate our part descriptors into the SDK's PascalCase shape.
  const assembledParts = params.parts.map(({partNumber, etag}) => ({
    PartNumber: partNumber,
    ETag: etag,
  }));
  const command = new CompleteMultipartUploadCommand({
    Bucket: params.bucket,
    Key: params.key,
    UploadId: params.uploadId,
    MultipartUpload: {Parts: assembledParts},
  });
  await this.getClient(params.bucket).send(command);
}
/**
 * Cancels an in-progress multipart upload so S3 discards any parts that were
 * already stored for it.
 */
async abortMultipartUpload(params: {bucket: string; key: string; uploadId: string}): Promise<void> {
  const {bucket, key, uploadId} = params;
  await this.getClient(bucket).send(
    new AbortMultipartUploadCommand({Bucket: bucket, Key: key, UploadId: uploadId}),
  );
}
}

View File

@@ -25,8 +25,11 @@ import type {IUserRepository} from '@fluxer/api/src/user/IUserRepository';
import {mapUserToPartialResponse} from '@fluxer/api/src/user/UserMappers';
import type {ICacheService} from '@fluxer/cache/src/ICacheService';
import {Coalescer} from '@fluxer/cache/src/utils/Coalescer';
import {UserFlags} from '@fluxer/constants/src/UserConstants';
import {UnknownUserError} from '@fluxer/errors/src/domains/user/UnknownUserError';
import {
DELETED_USER_DISCRIMINATOR,
DELETED_USER_GLOBAL_NAME,
DELETED_USER_USERNAME,
} from '@fluxer/constants/src/UserConstants';
import type {UserPartialResponse} from '@fluxer/schema/src/domains/user/UserResponseSchemas';
import {seconds} from 'itty-time';
@@ -66,10 +69,16 @@ export class UserCacheService {
const userPartialResponse = await this.coalescer.coalesce(cacheKey, async () => {
const user = await this.userRepository.findUnique(userId);
if (!user) {
throw new UnknownUserError();
}
if ((user.flags & UserFlags.DELETED) !== 0n && !user.isSystem) {
throw new UnknownUserError();
Logger.warn({userId}, 'User not found during partial resolution, returning deleted user fallback');
return {
id: userId.toString(),
username: DELETED_USER_USERNAME,
discriminator: DELETED_USER_DISCRIMINATOR.toString().padStart(4, '0'),
global_name: DELETED_USER_GLOBAL_NAME,
avatar: null,
avatar_color: null,
flags: 0,
};
}
return mapUserToPartialResponse(user);
});

View File

@@ -0,0 +1,670 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {getRoles, updateRole} from '@fluxer/api/src/guild/tests/GuildTestUtils';
import {
acceptInvite,
createChannelInvite,
createDMChannel,
createFriendship,
createGuild,
createMessageHarness,
createTestAccount,
ensureSessionStarted,
sendMessage,
updateChannelPermissions,
} from '@fluxer/api/src/message/tests/MessageTestUtils';
import type {ApiTestHarness} from '@fluxer/api/src/test/ApiTestHarness';
import {HTTP_STATUS, TEST_IDS} from '@fluxer/api/src/test/TestConstants';
import {createBuilder} from '@fluxer/api/src/test/TestRequestBuilder';
import {APIErrorCodes} from '@fluxer/constants/src/ApiErrorCodes';
import {MessageReferenceTypes, Permissions} from '@fluxer/constants/src/ChannelConstants';
import {afterAll, beforeAll, beforeEach, describe, expect, it} from 'vitest';
// Integration suite for message-forwarding access control: a user may only
// forward a message they can actually read in its source channel, and a
// rejected forward must never leak the source message's content in the
// response body.
describe('Message forwarding access control', () => {
let harness: ApiTestHarness;
// One shared API harness for the whole suite; state is reset per test below.
beforeAll(async () => {
harness = await createMessageHarness();
});
beforeEach(async () => {
await harness.reset();
});
afterAll(async () => {
await harness?.shutdown();
});
// Forwarding out of direct-message channels: only participants of the source
// DM may forward its messages.
describe('DM access control', () => {
// An attacker who knows a DM's channel/message IDs but is not a participant
// must get UNKNOWN_CHANNEL (not a content leak, not a permission hint).
it('rejects forwarding from a DM the user is not a participant of', async () => {
const user1 = await createTestAccount(harness);
const user2 = await createTestAccount(harness);
const attacker = await createTestAccount(harness);
await ensureSessionStarted(harness, user1.token);
await ensureSessionStarted(harness, user2.token);
await ensureSessionStarted(harness, attacker.token);
await createFriendship(harness, user1, user2);
await createFriendship(harness, user1, attacker);
const privateDm = await createDMChannel(harness, user1.token, user2.userId);
const originalMessage = await sendMessage(harness, user1.token, privateDm.id, 'This is a private message');
const attackerDm = await createDMChannel(harness, attacker.token, user1.userId);
await createBuilder(harness, attacker.token)
.post(`/channels/${attackerDm.id}/messages`)
.body({
message_reference: {
message_id: originalMessage.id,
channel_id: privateDm.id,
type: MessageReferenceTypes.FORWARD,
},
})
.expect(HTTP_STATUS.NOT_FOUND, APIErrorCodes.UNKNOWN_CHANNEL)
.execute();
});
// A made-up source channel ID must be rejected even when the message ID is real.
it('rejects forwarding with a fabricated DM channel ID', async () => {
const user1 = await createTestAccount(harness);
const user2 = await createTestAccount(harness);
await ensureSessionStarted(harness, user1.token);
await ensureSessionStarted(harness, user2.token);
await createFriendship(harness, user1, user2);
const dm = await createDMChannel(harness, user1.token, user2.userId);
const message = await sendMessage(harness, user1.token, dm.id, 'Test');
await createBuilder(harness, user1.token)
.post(`/channels/${dm.id}/messages`)
.body({
message_reference: {
message_id: message.id,
channel_id: TEST_IDS.NONEXISTENT_CHANNEL,
type: MessageReferenceTypes.FORWARD,
},
})
.expect(HTTP_STATUS.NOT_FOUND, APIErrorCodes.UNKNOWN_CHANNEL)
.execute();
});
// Happy path: a DM participant forwards to another of their DMs and the
// snapshot carries the original content.
it('allows forwarding from a DM the user is a participant of', async () => {
const user1 = await createTestAccount(harness);
const user2 = await createTestAccount(harness);
const user3 = await createTestAccount(harness);
await ensureSessionStarted(harness, user1.token);
await ensureSessionStarted(harness, user2.token);
await ensureSessionStarted(harness, user3.token);
await createFriendship(harness, user1, user2);
await createFriendship(harness, user1, user3);
const sourceDm = await createDMChannel(harness, user1.token, user2.userId);
const destDm = await createDMChannel(harness, user1.token, user3.userId);
const originalMessage = await sendMessage(harness, user1.token, sourceDm.id, 'Message to forward');
const forwarded = await createBuilder<{id: string; message_snapshots?: Array<{content?: string}>}>(
harness,
user1.token,
)
.post(`/channels/${destDm.id}/messages`)
.body({
message_reference: {
message_id: originalMessage.id,
channel_id: sourceDm.id,
type: MessageReferenceTypes.FORWARD,
},
})
.expect(HTTP_STATUS.OK)
.execute();
expect(forwarded.message_snapshots).toBeDefined();
expect(forwarded.message_snapshots!.length).toBeGreaterThan(0);
expect(forwarded.message_snapshots![0].content).toBe('Message to forward');
});
});
// Forwarding out of guild channels: VIEW_CHANNEL and READ_MESSAGE_HISTORY on
// the source channel are both required.
describe('Guild channel access control', () => {
// A member denied VIEW_CHANNEL via a user overwrite cannot forward from it.
it('rejects forwarding from a guild channel the user cannot view', async () => {
const owner = await createTestAccount(harness);
const member = await createTestAccount(harness);
await ensureSessionStarted(harness, owner.token);
await ensureSessionStarted(harness, member.token);
const guild = await createGuild(harness, owner.token, 'Access control test guild');
const invite = await createChannelInvite(harness, owner.token, guild.system_channel_id!);
await acceptInvite(harness, member.token, invite.code);
const secretChannel = await createBuilder<{id: string}>(harness, owner.token)
.post(`/guilds/${guild.id}/channels`)
.body({name: 'secret', type: 0})
.expect(HTTP_STATUS.OK)
.execute();
const publicChannel = await createBuilder<{id: string}>(harness, owner.token)
.post(`/guilds/${guild.id}/channels`)
.body({name: 'public', type: 0})
.expect(HTTP_STATUS.OK)
.execute();
const secretMessage = await sendMessage(harness, owner.token, secretChannel.id, 'Top secret content');
await updateChannelPermissions(harness, owner.token, secretChannel.id, member.userId, {
type: 1,
deny: Permissions.VIEW_CHANNEL.toString(),
});
await createBuilder(harness, member.token)
.post(`/channels/${publicChannel.id}/messages`)
.body({
message_reference: {
message_id: secretMessage.id,
channel_id: secretChannel.id,
guild_id: guild.id,
type: MessageReferenceTypes.FORWARD,
},
})
.expect(HTTP_STATUS.FORBIDDEN)
.execute();
});
// Visibility alone is not enough: READ_MESSAGE_HISTORY on the source channel
// is also required to forward from it.
it('rejects forwarding from a guild channel without READ_MESSAGE_HISTORY', async () => {
const owner = await createTestAccount(harness);
const member = await createTestAccount(harness);
await ensureSessionStarted(harness, owner.token);
await ensureSessionStarted(harness, member.token);
const guild = await createGuild(harness, owner.token, 'Read history test guild');
const invite = await createChannelInvite(harness, owner.token, guild.system_channel_id!);
await acceptInvite(harness, member.token, invite.code);
const sourceChannel = await createBuilder<{id: string}>(harness, owner.token)
.post(`/guilds/${guild.id}/channels`)
.body({name: 'source', type: 0})
.expect(HTTP_STATUS.OK)
.execute();
const destChannel = await createBuilder<{id: string}>(harness, owner.token)
.post(`/guilds/${guild.id}/channels`)
.body({name: 'destination', type: 0})
.expect(HTTP_STATUS.OK)
.execute();
const message = await sendMessage(harness, owner.token, sourceChannel.id, 'Message with restricted history');
await updateChannelPermissions(harness, owner.token, sourceChannel.id, member.userId, {
type: 1,
deny: Permissions.READ_MESSAGE_HISTORY.toString(),
});
await createBuilder(harness, member.token)
.post(`/channels/${destChannel.id}/messages`)
.body({
message_reference: {
message_id: message.id,
channel_id: sourceChannel.id,
guild_id: guild.id,
type: MessageReferenceTypes.FORWARD,
},
})
.expect(HTTP_STATUS.FORBIDDEN)
.execute();
});
// Non-members of the source guild get ACCESS_DENIED when forwarding into
// their own guild.
it('rejects forwarding from a guild the user is not a member of', async () => {
const guildOwner = await createTestAccount(harness);
const outsider = await createTestAccount(harness);
await ensureSessionStarted(harness, guildOwner.token);
await ensureSessionStarted(harness, outsider.token);
await createFriendship(harness, guildOwner, outsider);
const guild = await createGuild(harness, guildOwner.token, 'Private guild');
const guildChannel = await createBuilder<{id: string}>(harness, guildOwner.token)
.post(`/guilds/${guild.id}/channels`)
.body({name: 'internal', type: 0})
.expect(HTTP_STATUS.OK)
.execute();
const message = await sendMessage(harness, guildOwner.token, guildChannel.id, 'Internal guild message');
const outsiderGuild = await createGuild(harness, outsider.token, 'Outsider guild');
await createBuilder(harness, outsider.token)
.post(`/channels/${outsiderGuild.system_channel_id!}/messages`)
.body({
message_reference: {
message_id: message.id,
channel_id: guildChannel.id,
guild_id: guild.id,
type: MessageReferenceTypes.FORWARD,
},
})
.expect(HTTP_STATUS.FORBIDDEN, APIErrorCodes.ACCESS_DENIED)
.execute();
});
// A valid channel with a nonexistent message ID yields UNKNOWN_MESSAGE.
it('rejects forwarding with a nonexistent source message ID', async () => {
const owner = await createTestAccount(harness);
await ensureSessionStarted(harness, owner.token);
const guild = await createGuild(harness, owner.token, 'Nonexistent message test');
const channel = await createBuilder<{id: string}>(harness, owner.token)
.post(`/guilds/${guild.id}/channels`)
.body({name: 'test', type: 0})
.expect(HTTP_STATUS.OK)
.execute();
await createBuilder(harness, owner.token)
.post(`/channels/${channel.id}/messages`)
.body({
message_reference: {
message_id: TEST_IDS.NONEXISTENT_MESSAGE,
channel_id: channel.id,
guild_id: guild.id,
type: MessageReferenceTypes.FORWARD,
},
})
.expect(HTTP_STATUS.NOT_FOUND, APIErrorCodes.UNKNOWN_MESSAGE)
.execute();
});
// History restrictions applied at the role (@everyone) level must be
// enforced the same way as per-user overwrites.
it('rejects forwarding from a guild channel where @everyone lacks READ_MESSAGE_HISTORY', async () => {
const owner = await createTestAccount(harness);
const member = await createTestAccount(harness);
await ensureSessionStarted(harness, owner.token);
await ensureSessionStarted(harness, member.token);
const guild = await createGuild(harness, owner.token, 'Role permission test guild');
const invite = await createChannelInvite(harness, owner.token, guild.system_channel_id!);
await acceptInvite(harness, member.token, invite.code);
const sourceChannel = await createBuilder<{id: string}>(harness, owner.token)
.post(`/guilds/${guild.id}/channels`)
.body({name: 'source', type: 0})
.expect(HTTP_STATUS.OK)
.execute();
const destChannel = await createBuilder<{id: string}>(harness, owner.token)
.post(`/guilds/${guild.id}/channels`)
.body({name: 'dest', type: 0})
.expect(HTTP_STATUS.OK)
.execute();
const message = await sendMessage(harness, owner.token, sourceChannel.id, 'Secret history message');
const roles = await getRoles(harness, owner.token, guild.id);
// The @everyone role shares the guild's ID; strip READ_MESSAGE_HISTORY from it.
const everyoneRole = roles.find((r) => r.id === guild.id);
if (everyoneRole) {
const currentPerms = BigInt(everyoneRole.permissions);
await updateRole(harness, owner.token, guild.id, everyoneRole.id, {
permissions: (currentPerms & ~Permissions.READ_MESSAGE_HISTORY).toString(),
});
}
await createBuilder(harness, member.token)
.post(`/channels/${destChannel.id}/messages`)
.body({
message_reference: {
message_id: message.id,
channel_id: sourceChannel.id,
guild_id: guild.id,
type: MessageReferenceTypes.FORWARD,
},
})
.expect(HTTP_STATUS.FORBIDDEN)
.execute();
});
});
// Forwards that cross the DM/guild boundary in either direction.
describe('Cross-boundary forwarding', () => {
// Guild-only content cannot be exfiltrated into a DM by a non-member.
it('rejects forwarding from inaccessible guild channel to a DM', async () => {
const guildOwner = await createTestAccount(harness);
const outsider = await createTestAccount(harness);
await ensureSessionStarted(harness, guildOwner.token);
await ensureSessionStarted(harness, outsider.token);
await createFriendship(harness, guildOwner, outsider);
const guild = await createGuild(harness, guildOwner.token, 'Guild to DM test');
const guildChannel = await createBuilder<{id: string}>(harness, guildOwner.token)
.post(`/guilds/${guild.id}/channels`)
.body({name: 'private', type: 0})
.expect(HTTP_STATUS.OK)
.execute();
const message = await sendMessage(harness, guildOwner.token, guildChannel.id, 'Guild-only content');
const dm = await createDMChannel(harness, outsider.token, guildOwner.userId);
await createBuilder(harness, outsider.token)
.post(`/channels/${dm.id}/messages`)
.body({
message_reference: {
message_id: message.id,
channel_id: guildChannel.id,
guild_id: guild.id,
type: MessageReferenceTypes.FORWARD,
},
})
.expect(HTTP_STATUS.FORBIDDEN, APIErrorCodes.ACCESS_DENIED)
.execute();
});
// Private DM content cannot be exfiltrated into a guild by a non-participant.
it('rejects forwarding from inaccessible DM to a guild channel', async () => {
const user1 = await createTestAccount(harness);
const user2 = await createTestAccount(harness);
const attacker = await createTestAccount(harness);
await ensureSessionStarted(harness, user1.token);
await ensureSessionStarted(harness, user2.token);
await ensureSessionStarted(harness, attacker.token);
await createFriendship(harness, user1, user2);
const privateDm = await createDMChannel(harness, user1.token, user2.userId);
const privateMessage = await sendMessage(harness, user1.token, privateDm.id, 'Secret DM content');
const attackerGuild = await createGuild(harness, attacker.token, 'Attacker guild');
await createBuilder(harness, attacker.token)
.post(`/channels/${attackerGuild.system_channel_id!}/messages`)
.body({
message_reference: {
message_id: privateMessage.id,
channel_id: privateDm.id,
type: MessageReferenceTypes.FORWARD,
},
})
.expect(HTTP_STATUS.NOT_FOUND, APIErrorCodes.UNKNOWN_CHANNEL)
.execute();
});
// Happy path: a guild member with normal access forwards into a DM.
it('allows forwarding from accessible guild channel to DM', async () => {
const owner = await createTestAccount(harness);
const friend = await createTestAccount(harness);
await ensureSessionStarted(harness, owner.token);
await ensureSessionStarted(harness, friend.token);
await createFriendship(harness, owner, friend);
const guild = await createGuild(harness, owner.token, 'Forward to DM guild');
const invite = await createChannelInvite(harness, owner.token, guild.system_channel_id!);
await acceptInvite(harness, friend.token, invite.code);
const guildChannel = await createBuilder<{id: string}>(harness, owner.token)
.post(`/guilds/${guild.id}/channels`)
.body({name: 'public', type: 0})
.expect(HTTP_STATUS.OK)
.execute();
const message = await sendMessage(harness, owner.token, guildChannel.id, 'Forwardable guild message');
const dm = await createDMChannel(harness, friend.token, owner.userId);
const forwarded = await createBuilder<{id: string; message_snapshots?: Array<{content?: string}>}>(
harness,
friend.token,
)
.post(`/channels/${dm.id}/messages`)
.body({
message_reference: {
message_id: message.id,
channel_id: guildChannel.id,
guild_id: guild.id,
type: MessageReferenceTypes.FORWARD,
},
})
.expect(HTTP_STATUS.OK)
.execute();
expect(forwarded.message_snapshots).toBeDefined();
expect(forwarded.message_snapshots!.length).toBeGreaterThan(0);
expect(forwarded.message_snapshots![0].content).toBe('Forwardable guild message');
});
// Happy path: cross-guild forwarding works when the user belongs to both guilds.
it('allows forwarding between guilds when user has access to both', async () => {
const user = await createTestAccount(harness);
await ensureSessionStarted(harness, user.token);
const guildA = await createGuild(harness, user.token, 'Guild A');
const guildB = await createGuild(harness, user.token, 'Guild B');
const channelA = await createBuilder<{id: string}>(harness, user.token)
.post(`/guilds/${guildA.id}/channels`)
.body({name: 'source', type: 0})
.expect(HTTP_STATUS.OK)
.execute();
const channelB = await createBuilder<{id: string}>(harness, user.token)
.post(`/guilds/${guildB.id}/channels`)
.body({name: 'destination', type: 0})
.expect(HTTP_STATUS.OK)
.execute();
const message = await sendMessage(harness, user.token, channelA.id, 'Cross-guild forward');
const forwarded = await createBuilder<{id: string; message_snapshots?: Array<{content?: string}>}>(
harness,
user.token,
)
.post(`/channels/${channelB.id}/messages`)
.body({
message_reference: {
message_id: message.id,
channel_id: channelA.id,
guild_id: guildA.id,
type: MessageReferenceTypes.FORWARD,
},
})
.expect(HTTP_STATUS.OK)
.execute();
expect(forwarded.message_snapshots).toBeDefined();
expect(forwarded.message_snapshots![0].content).toBe('Cross-guild forward');
});
});
// Regression tests for the reported vulnerability: forwarding by supplying raw
// IDs (numeric `type`/`flags` exactly as in the original report).
describe('Reported vulnerability: arbitrary ID forwarding', () => {
it('rejects forwarding a DM message using raw channel and message IDs', async () => {
const victim1 = await createTestAccount(harness);
const victim2 = await createTestAccount(harness);
const attacker = await createTestAccount(harness);
await ensureSessionStarted(harness, victim1.token);
await ensureSessionStarted(harness, victim2.token);
await ensureSessionStarted(harness, attacker.token);
await createFriendship(harness, victim1, victim2);
await createFriendship(harness, attacker, victim1);
const victimDm = await createDMChannel(harness, victim1.token, victim2.userId);
const privateMessage = await sendMessage(
harness,
victim1.token,
victimDm.id,
'Private conversation between victims',
);
const attackerGuild = await createGuild(harness, attacker.token, 'Attacker guild');
await createBuilder(harness, attacker.token)
.post(`/channels/${attackerGuild.system_channel_id!}/messages`)
.body({
content: '',
message_reference: {
message_id: privateMessage.id,
channel_id: victimDm.id,
type: 1,
},
flags: 1,
})
.expect(HTTP_STATUS.NOT_FOUND, APIErrorCodes.UNKNOWN_CHANNEL)
.execute();
});
it('rejects forwarding a guild message using raw guild, channel, and message IDs', async () => {
const guildOwner = await createTestAccount(harness);
const attacker = await createTestAccount(harness);
await ensureSessionStarted(harness, guildOwner.token);
await ensureSessionStarted(harness, attacker.token);
await createFriendship(harness, guildOwner, attacker);
const targetGuild = await createGuild(harness, guildOwner.token, 'Target private guild');
const privateChannel = await createBuilder<{id: string}>(harness, guildOwner.token)
.post(`/guilds/${targetGuild.id}/channels`)
.body({name: 'private-channel', type: 0})
.expect(HTTP_STATUS.OK)
.execute();
const privateMessage = await sendMessage(
harness,
guildOwner.token,
privateChannel.id,
'Confidential guild message',
);
const attackerGuild = await createGuild(harness, attacker.token, 'Attacker guild');
await createBuilder(harness, attacker.token)
.post(`/channels/${attackerGuild.system_channel_id!}/messages`)
.body({
content: '',
message_reference: {
message_id: privateMessage.id,
channel_id: privateChannel.id,
guild_id: targetGuild.id,
type: 1,
},
flags: 1,
})
.expect(HTTP_STATUS.FORBIDDEN, APIErrorCodes.ACCESS_DENIED)
.execute();
});
// The rejection response body must not contain the source content anywhere.
it('snapshot content is never returned when forward is rejected', async () => {
const victim1 = await createTestAccount(harness);
const victim2 = await createTestAccount(harness);
const attacker = await createTestAccount(harness);
await ensureSessionStarted(harness, victim1.token);
await ensureSessionStarted(harness, victim2.token);
await ensureSessionStarted(harness, attacker.token);
await createFriendship(harness, victim1, victim2);
await createFriendship(harness, attacker, victim1);
const victimDm = await createDMChannel(harness, victim1.token, victim2.userId);
const secretContent = 'Super secret message that must not leak';
const privateMessage = await sendMessage(harness, victim1.token, victimDm.id, secretContent);
const attackerDm = await createDMChannel(harness, attacker.token, victim1.userId);
const {json} = await createBuilder<Record<string, unknown>>(harness, attacker.token)
.post(`/channels/${attackerDm.id}/messages`)
.body({
content: '',
message_reference: {
message_id: privateMessage.id,
channel_id: victimDm.id,
type: 1,
},
flags: 1,
})
.expect(HTTP_STATUS.NOT_FOUND)
.executeWithResponse();
// Scan the entire serialized response, not just known fields.
const responseText = JSON.stringify(json);
expect(responseText).not.toContain(secretContent);
expect(json).not.toHaveProperty('message_snapshots');
});
});
// Denied forwards between guild channels must not leak snapshots either.
describe('Snapshot content isolation', () => {
it('does not leak message content through forward snapshot when access is denied', async () => {
const owner = await createTestAccount(harness);
const member = await createTestAccount(harness);
await ensureSessionStarted(harness, owner.token);
await ensureSessionStarted(harness, member.token);
const guild = await createGuild(harness, owner.token, 'Snapshot isolation test guild');
const invite = await createChannelInvite(harness, owner.token, guild.system_channel_id!);
await acceptInvite(harness, member.token, invite.code);
const secretChannel = await createBuilder<{id: string}>(harness, owner.token)
.post(`/guilds/${guild.id}/channels`)
.body({name: 'secret', type: 0})
.expect(HTTP_STATUS.OK)
.execute();
const destChannel = await createBuilder<{id: string}>(harness, owner.token)
.post(`/guilds/${guild.id}/channels`)
.body({name: 'dest', type: 0})
.expect(HTTP_STATUS.OK)
.execute();
await sendMessage(harness, owner.token, secretChannel.id, 'Highly confidential information');
await updateChannelPermissions(harness, owner.token, secretChannel.id, member.userId, {
type: 1,
deny: Permissions.VIEW_CHANNEL.toString(),
});
// Fetch the message ID as the owner, since the member can no longer view it.
const messages = await createBuilder<Array<{id: string}>>(harness, owner.token)
.get(`/channels/${secretChannel.id}/messages`)
.expect(HTTP_STATUS.OK)
.execute();
const secretMessageId = messages[0].id;
const {json} = await createBuilder<Record<string, unknown>>(harness, member.token)
.post(`/channels/${destChannel.id}/messages`)
.body({
message_reference: {
message_id: secretMessageId,
channel_id: secretChannel.id,
guild_id: guild.id,
type: MessageReferenceTypes.FORWARD,
},
})
.expect(HTTP_STATUS.FORBIDDEN)
.executeWithResponse();
expect(json).not.toHaveProperty('message_snapshots');
expect(JSON.stringify(json)).not.toContain('Highly confidential information');
});
});
});

View File

@@ -30,7 +30,6 @@ interface RequireXForwardedForOptions {
const defaultExemptPaths: Array<string> = [
'/_health',
'/_rpc',
'/webhooks/livekit',
'/test',
'/connections/bluesky/client-metadata.json',

View File

@@ -35,6 +35,7 @@ import {Config} from '@fluxer/api/src/Config';
import {ChannelRepository} from '@fluxer/api/src/channel/ChannelRepository';
import {ChannelRequestService} from '@fluxer/api/src/channel/services/ChannelRequestService';
import {ChannelService} from '@fluxer/api/src/channel/services/ChannelService';
import {ChunkedUploadService} from '@fluxer/api/src/channel/services/ChunkedUploadService';
import {MessageRequestService} from '@fluxer/api/src/channel/services/message/MessageRequestService';
import {ScheduledMessageService} from '@fluxer/api/src/channel/services/ScheduledMessageService';
import {StreamPreviewService} from '@fluxer/api/src/channel/services/StreamPreviewService';
@@ -153,17 +154,19 @@ import {UserService} from '@fluxer/api/src/user/services/UserService';
import {UserPermissionUtils} from '@fluxer/api/src/utils/UserPermissionUtils';
import {VoiceRepository} from '@fluxer/api/src/voice/VoiceRepository';
import {VoiceService} from '@fluxer/api/src/voice/VoiceService';
import {SendGridWebhookService} from '@fluxer/api/src/webhook/SendGridWebhookService';
import {SweegoWebhookService} from '@fluxer/api/src/webhook/SweegoWebhookService';
import {WebhookRepository} from '@fluxer/api/src/webhook/WebhookRepository';
import {WebhookRequestService} from '@fluxer/api/src/webhook/WebhookRequestService';
import {WebhookService} from '@fluxer/api/src/webhook/WebhookService';
import type {ICacheService} from '@fluxer/cache/src/ICacheService';
import {KVCacheProvider} from '@fluxer/cache/src/providers/KVCacheProvider';
import {TEXT_BASED_CHANNEL_TYPES} from '@fluxer/constants/src/ChannelConstants';
import {EmailI18nService} from '@fluxer/email/src/EmailI18nService';
import type {EmailConfig, UserBouncedEmailChecker} from '@fluxer/email/src/EmailProviderTypes';
import {EmailService} from '@fluxer/email/src/EmailService';
import type {IEmailService} from '@fluxer/email/src/IEmailService';
import {TestEmailService} from '@fluxer/email/src/TestEmailService';
import {CannotSendMessageToNonTextChannelError} from '@fluxer/errors/src/domains/channel/CannotSendMessageToNonTextChannelError';
import {createMockLogger} from '@fluxer/logger/src/mock';
import {RateLimitService} from '@fluxer/rate_limit/src/RateLimitService';
import type {ISmsProvider} from '@fluxer/sms/src/providers/ISmsProvider';
@@ -471,6 +474,19 @@ export const ServiceMiddleware = createMiddleware<HonoEnv>(async (ctx, next) =>
mediaService,
);
const chunkedUploadService = new ChunkedUploadService(
storageService,
kvClient,
userRepository,
limitConfigService,
channelService.getChannelAuthenticated.bind(channelService),
(channel) => {
if (!TEXT_BASED_CHANNEL_TYPES.has(channel.type)) {
throw new CannotSendMessageToNonTextChannelError();
}
},
);
const scheduledMessageRepository = new ScheduledMessageRepository();
const scheduledMessageService = new ScheduledMessageService(
channelService,
@@ -793,7 +809,7 @@ export const ServiceMiddleware = createMiddleware<HonoEnv>(async (ctx, next) =>
donationService = new DonationService(donationMagicLinkService, donationCheckoutService);
}
const sendGridWebhookService = new SendGridWebhookService(userRepository, gatewayService);
const sweegoWebhookService = new SweegoWebhookService(userRepository, gatewayService);
const applicationService = new ApplicationService({
applicationRepository,
@@ -853,7 +869,7 @@ export const ServiceMiddleware = createMiddleware<HonoEnv>(async (ctx, next) =>
userCacheService,
mediaService,
liveKitWebhookService ?? null,
sendGridWebhookService,
sweegoWebhookService,
);
const packRequestService = new PackRequestService(packService);
@@ -870,6 +886,7 @@ export const ServiceMiddleware = createMiddleware<HonoEnv>(async (ctx, next) =>
ctx.set('cacheService', cacheService);
ctx.set('channelService', channelService);
ctx.set('channelRequestService', channelRequestService);
ctx.set('chunkedUploadService', chunkedUploadService);
ctx.set('messageRequestService', messageRequestService);
ctx.set('channelRepository', channelRepository);
ctx.set('connectionService', connectionService);
@@ -912,7 +929,7 @@ export const ServiceMiddleware = createMiddleware<HonoEnv>(async (ctx, next) =>
ctx.set('reportRequestService', reportRequestService);
ctx.set('rpcService', rpcService);
ctx.set('searchService', searchService);
ctx.set('sendGridWebhookService', sendGridWebhookService);
ctx.set('sweegoWebhookService', sweegoWebhookService);
ctx.set('snowflakeService', snowflakeService);
ctx.set('storageService', storageService);
ctx.set('themeService', themeService);

View File

@@ -35,7 +35,6 @@ import type {ISearchProvider} from '@fluxer/api/src/search/ISearchProvider';
import {VoiceAvailabilityService} from '@fluxer/api/src/voice/VoiceAvailabilityService';
import {VoiceRepository} from '@fluxer/api/src/voice/VoiceRepository';
import {VoiceTopology} from '@fluxer/api/src/voice/VoiceTopology';
import {WorkerService as ProdWorkerService} from '@fluxer/api/src/worker/WorkerService';
import type {IKVProvider} from '@fluxer/kv_client/src/IKVProvider';
import {KVClient} from '@fluxer/kv_client/src/KVClient';
import type {S3Service} from '@fluxer/s3/src/s3/S3Service';
@@ -70,7 +69,7 @@ export function getWorkerService(): IWorkerService {
if (_injectedWorkerService) {
return _injectedWorkerService;
}
return new ProdWorkerService();
throw new Error('WorkerService has not been initialized. Call setInjectedWorkerService() during startup.');
}
let _injectedGatewayService: IGatewayService | undefined;

View File

@@ -122,15 +122,13 @@ export class ApplicationRepository implements IApplicationRepository {
}
const result = await executeVersionedUpdate<ApplicationRow, 'application_id'>(
async () => {
if (oldData !== undefined) return oldData;
return await fetchOne<ApplicationRow>(SELECT_APPLICATION_CQL, {application_id: applicationId});
},
async () => fetchOne<ApplicationRow>(SELECT_APPLICATION_CQL, {application_id: applicationId}),
(current) => ({
pk: {application_id: applicationId},
patch: buildPatchFromData(data, current, APPLICATION_COLUMNS, ['application_id']),
}),
Applications,
{initialData: oldData},
);
const batch = new BatchBuilder();

View File

@@ -65,10 +65,19 @@ describe('OAuth2 Application Delete', () => {
.expect(HTTP_STATUS.UNAUTHORIZED)
.execute();
await createBuilder(harness, account.token)
const botUser = await createBuilder<{
id: string;
username: string;
discriminator: string;
avatar: string | null;
}>(harness, account.token)
.get(`/users/${createResult.botUserId}`)
.expect(HTTP_STATUS.NOT_FOUND)
.expect(HTTP_STATUS.OK)
.execute();
expect(botUser.username).toBe('DeletedUser');
expect(botUser.discriminator).toBe('0000');
expect(botUser.avatar).toBeNull();
});
test('keeps bot-authored messages readable after deleting the application', async () => {

View File

@@ -140,4 +140,19 @@ export const ChannelRateLimitConfigs = {
bucket: 'channel:stream:preview:post::stream_key',
config: {limit: 20, windowMs: ms('10 seconds')},
} as RouteRateLimitConfig,
CHANNEL_CHUNKED_UPLOAD_CREATE: {
bucket: 'channel:chunked_upload:create::channel_id',
config: {limit: 5, windowMs: ms('10 seconds')},
} as RouteRateLimitConfig,
CHANNEL_CHUNKED_UPLOAD_CHUNK: {
bucket: 'channel:chunked_upload:chunk::channel_id',
config: {limit: 50, windowMs: ms('10 seconds')},
} as RouteRateLimitConfig,
CHANNEL_CHUNKED_UPLOAD_COMPLETE: {
bucket: 'channel:chunked_upload:complete::channel_id',
config: {limit: 5, windowMs: ms('10 seconds')},
} as RouteRateLimitConfig,
} as const;

View File

@@ -0,0 +1,116 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import type {ILogger} from '@fluxer/api/src/ILogger';
import {RequestCacheMiddleware} from '@fluxer/api/src/middleware/RequestCacheMiddleware';
import {ServiceMiddleware} from '@fluxer/api/src/middleware/ServiceMiddleware';
import type {HonoEnv} from '@fluxer/api/src/types/HonoEnv';
import {Validator} from '@fluxer/api/src/Validator';
import {AppErrorHandler} from '@fluxer/errors/src/domains/core/ErrorHandlers';
import type {INatsConnectionManager} from '@fluxer/nats/src/INatsConnectionManager';
import {RpcRequest} from '@fluxer/schema/src/domains/rpc/RpcSchemas';
import {Hono} from 'hono';
import {type Msg, StringCodec, type Subscription} from 'nats';
const RPC_SUBJECT = 'rpc.api';
const QUEUE_GROUP = 'api';
/**
 * Subscribes to the `rpc.api` NATS subject (queue group `api`) and serves each
 * message by replaying it as an in-memory POST through a private Hono app, so
 * NATS-delivered RPC requests pass the same validation, error handling, and
 * service-injection middleware as regular HTTP traffic.
 */
export class NatsApiRpcListener {
  private readonly connectionManager: INatsConnectionManager;
  private readonly logger: ILogger;
  private readonly rpcApp: Hono<HonoEnv>;
  private readonly codec = StringCodec();
  private subscription: Subscription | null = null;
  private running = false;

  constructor(connectionManager: INatsConnectionManager, logger: ILogger) {
    this.connectionManager = connectionManager;
    this.logger = logger;
    this.rpcApp = new Hono<HonoEnv>({strict: true});
    this.rpcApp.onError(AppErrorHandler);
    // Middleware order matters: request cache must exist before services bind to it.
    this.rpcApp.use(RequestCacheMiddleware);
    this.rpcApp.use(ServiceMiddleware);
    this.rpcApp.post('/', Validator('json', RpcRequest), async (ctx) => {
      const request = ctx.req.valid('json');
      const rpcService = ctx.get('rpcService');
      const requestCache = ctx.get('requestCache');
      const response = await rpcService.handleRpcRequest({request, requestCache});
      return ctx.json(response);
    });
  }

  /** Connects to NATS, subscribes, and starts the background message loop. */
  async start(): Promise<void> {
    await this.connectionManager.connect();
    const connection = this.connectionManager.getConnection();
    this.subscription = connection.subscribe(RPC_SUBJECT, {queue: QUEUE_GROUP});
    this.running = true;
    this.logger.info(`NATS API RPC listener started, subscribed to ${RPC_SUBJECT} with queue group ${QUEUE_GROUP}`);
    // Fix: the loop promise was previously unobserved; if subscription
    // iteration rejects (e.g. the connection drops with an error) it became
    // an unhandled promise rejection. Observe it and log instead.
    void this.processMessages().catch((error) => {
      this.logger.error(
        {error: error instanceof Error ? error.message : String(error)},
        'NATS API RPC message loop terminated unexpectedly',
      );
    });
  }

  /** Stops the loop, unsubscribes, and drains the underlying NATS connection. */
  async stop(): Promise<void> {
    this.running = false;
    if (this.subscription) {
      this.subscription.unsubscribe();
      this.subscription = null;
    }
    await this.connectionManager.drain();
    this.logger.info('NATS API RPC listener stopped');
  }

  // Iterates the subscription until unsubscribe/stop ends it. Per-message
  // failures are logged and never break the loop; messages are handled
  // without awaiting so a slow request cannot stall the queue.
  private async processMessages(): Promise<void> {
    if (!this.subscription) return;
    for await (const msg of this.subscription) {
      if (!this.running) break;
      this.handleMessage(msg).catch((error) => {
        this.logger.error(
          {error: error instanceof Error ? error.message : String(error)},
          'NATS API RPC handler error',
        );
      });
    }
  }

  // Dispatches one NATS message through the Hono app. When the sender asked
  // for a reply, responds with the raw body on 2xx, or with an
  // {_error, status, message} envelope for non-2xx / thrown outcomes.
  private async handleMessage(msg: Msg): Promise<void> {
    const payload = this.codec.decode(msg.data);
    try {
      const response = await this.rpcApp.request('/', {
        method: 'POST',
        headers: {'Content-Type': 'application/json'},
        body: payload,
      });
      if (!msg.reply) return;
      const responseBody = await response.text();
      if (response.ok) {
        msg.respond(this.codec.encode(responseBody));
      } else {
        msg.respond(this.codec.encode(JSON.stringify({_error: true, status: response.status, message: responseBody})));
      }
    } catch (error) {
      if (!msg.reply) return;
      const message = error instanceof Error ? error.message : 'internal_error';
      msg.respond(this.codec.encode(JSON.stringify({_error: true, status: 500, message})));
    }
  }
}

View File

@@ -1,90 +0,0 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {timingSafeEqual} from 'node:crypto';
import {Config} from '@fluxer/api/src/Config';
import {Logger} from '@fluxer/api/src/Logger';
import {OpenAPI} from '@fluxer/api/src/middleware/ResponseTypeMiddleware';
import type {HonoApp, HonoEnv} from '@fluxer/api/src/types/HonoEnv';
import {Validator} from '@fluxer/api/src/Validator';
import {UnauthorizedError} from '@fluxer/errors/src/domains/core/UnauthorizedError';
import {RpcRequest, RpcResponse} from '@fluxer/schema/src/domains/rpc/RpcSchemas';
import {createMiddleware} from 'hono/factory';
// Guards internal RPC routes: requires `Authorization: Bearer <rpcSecret>`.
// Compares with timingSafeEqual so the secret cannot be probed via timing.
const InternalNetworkRequired = createMiddleware<HonoEnv>(async (ctx, next) => {
const authHeader = ctx.req.header('Authorization');
const expectedAuth = `Bearer ${Config.gateway.rpcSecret}`;
if (!authHeader) {
throw new UnauthorizedError();
}
const authBuffer = Buffer.from(authHeader, 'utf8');
const expectedBuffer = Buffer.from(expectedAuth, 'utf8');
// Length check first: timingSafeEqual throws when buffer lengths differ.
if (authBuffer.length !== expectedBuffer.length || !timingSafeEqual(authBuffer, expectedBuffer)) {
throw new UnauthorizedError();
}
await next();
});
/**
 * Registers the internal `/_rpc` endpoint used for inter-service RPC.
 * Protected by InternalNetworkRequired (shared-secret bearer auth); request
 * bodies are validated against RpcRequest before dispatch to the RpcService.
 */
export function RpcController(app: HonoApp) {
app.post(
'/_rpc',
InternalNetworkRequired,
OpenAPI({
operationId: 'handle_rpc_request',
summary: 'Handle internal RPC request',
description:
'Internal RPC endpoint for handling inter-service communication. Requires internal network authorization.',
responseSchema: RpcResponse,
statusCode: 200,
security: [],
tags: 'Gateway',
}),
Validator('json', RpcRequest),
async (ctx) => {
const request = ctx.req.valid('json');
// Log only metadata (presence flags), never the token or raw IP payload.
if (request.type === 'session') {
Logger.debug(
{
rpcType: request.type,
version: request.version,
hasIp: request.ip !== undefined,
hasLatitude: request.latitude !== undefined,
hasLongitude: request.longitude !== undefined,
},
'RPC session request received',
);
} else {
Logger.debug({rpcType: request.type}, 'RPC request received');
}
try {
const response = await ctx.get('rpcService').handleRpcRequest({request, requestCache: ctx.get('requestCache')});
return ctx.json(response);
} catch (error) {
// Log with context, then rethrow so the app-level error handler responds.
Logger.warn(
{
rpcType: request.type,
error: error instanceof Error ? error.message : String(error),
},
'RPC request failed',
);
throw error;
}
},
);
}

View File

@@ -1,220 +0,0 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {createTestAccount} from '@fluxer/api/src/auth/tests/AuthTestUtils';
import {createOAuth2BotApplication} from '@fluxer/api/src/bot/tests/BotTestUtils';
import {Config} from '@fluxer/api/src/Config';
import {setupTestGuildWithMembers} from '@fluxer/api/src/guild/tests/GuildTestUtils';
import {createFriendship} from '@fluxer/api/src/message/tests/MessageTestUtils';
import {type ApiTestHarness, createApiTestHarness} from '@fluxer/api/src/test/ApiTestHarness';
import {HTTP_STATUS} from '@fluxer/api/src/test/TestConstants';
import {createBuilder} from '@fluxer/api/src/test/TestRequestBuilder';
import {grantPremium, updateUserProfile} from '@fluxer/api/src/user/tests/UserTestUtils';
import {UserFlags, UserPremiumTypes} from '@fluxer/constants/src/UserConstants';
import {afterEach, beforeEach, describe, expect, test} from 'vitest';
// Minimal shape of the `session` RPC response that these tests assert against;
// the full response carries more fields, which the tests ignore.
interface SessionRpcResponse {
type: 'session';
data: {
user: {
id: string;
discriminator: string;
};
relationships: Array<{
id: string;
}>;
};
}
// Minimal shape of the `guild` RPC response that these tests assert against.
interface GuildRpcResponse {
type: 'guild';
data: {
members: Array<{
user: {
id: string;
};
}>;
};
}
// End-to-end tests for gateway `/_rpc` resilience: stale/deleted users must be
// filtered out of RPC payloads, and discriminator sanitization must run.
// Requests to `/_rpc` authenticate with the shared gateway RPC secret;
// `/test/*` routes are harness-only helpers.
describe('Gateway RPC resilience', () => {
let harness: ApiTestHarness;
beforeEach(async () => {
harness = await createApiTestHarness();
});
afterEach(async () => {
await harness.shutdown();
});
// A friendship targeting a user later flagged DELETED must not surface in
// the session payload's relationships.
test('session RPC skips relationships pointing to deleted users', async () => {
const account = await createTestAccount(harness);
const friend = await createTestAccount(harness);
await createFriendship(harness, account, friend);
await createBuilder(harness, '')
.patch(`/test/users/${friend.userId}/flags`)
.body({flags: UserFlags.DELETED.toString()})
.expect(HTTP_STATUS.OK)
.execute();
// Clear caches so the flag change is observed by the RPC path.
await createBuilder(harness, '').post('/test/cache-clear').expect(HTTP_STATUS.OK).execute();
const response = await createBuilder<SessionRpcResponse>(harness, `Bearer ${Config.gateway.rpcSecret}`)
.post('/_rpc')
.body({
type: 'session',
token: account.token,
version: 1,
ip: '127.0.0.1',
})
.expect(HTTP_STATUS.OK)
.execute();
expect(response.type).toBe('session');
expect(response.data.relationships.some((relationship) => relationship.id === friend.userId)).toBe(false);
});
// Session RPC should accept bot tokens sent with the `Bot ` prefix.
test('session RPC authenticates bot tokens with Bot prefix', async () => {
const owner = await createTestAccount(harness);
const bot = await createOAuth2BotApplication(harness, owner.token, `RPC Bot Prefix ${Date.now()}`);
const response = await createBuilder<SessionRpcResponse>(harness, `Bearer ${Config.gateway.rpcSecret}`)
.post('/_rpc')
.body({
type: 'session',
token: `Bot ${bot.botToken}`,
version: 1,
ip: '127.0.0.1',
})
.expect(HTTP_STATUS.OK)
.execute();
expect(response.type).toBe('session');
expect(response.data.user.id).toBe(bot.botUserId);
});
// Guild RPC must drop members whose backing user rows are flagged DELETED
// while keeping live members (the owner) intact.
test('guild RPC skips members whose backing users are deleted', async () => {
const {owner, members, guild} = await setupTestGuildWithMembers(harness, 1);
const staleMember = members[0];
expect(staleMember).toBeDefined();
await createBuilder(harness, '')
.patch(`/test/users/${staleMember!.userId}/flags`)
.body({flags: UserFlags.DELETED.toString()})
.expect(HTTP_STATUS.OK)
.execute();
await createBuilder(harness, '').post('/test/cache-clear').expect(HTTP_STATUS.OK).execute();
const response = await createBuilder<GuildRpcResponse>(harness, `Bearer ${Config.gateway.rpcSecret}`)
.post('/_rpc')
.body({
type: 'guild',
guild_id: guild.id,
})
.expect(HTTP_STATUS.OK)
.execute();
expect(response.type).toBe('guild');
expect(response.data.members.some((member) => member.user.id === staleMember!.userId)).toBe(false);
expect(response.data.members.some((member) => member.user.id === owner.userId)).toBe(true);
});
// Discriminator '0000' is a lifetime-premium perk: after downgrading to a
// subscription, the session RPC should reroll it.
test('session RPC rerolls discriminator 0000 for non-lifetime premium users', async () => {
const account = await createTestAccount(harness);
if (!account.username) {
throw new Error('Expected test account username');
}
await grantPremium(harness, account.userId, UserPremiumTypes.LIFETIME);
const lifetimeUpdated = await updateUserProfile(harness, account.token, {
username: account.username,
discriminator: '0000',
password: account.password,
});
expect(lifetimeUpdated.json.discriminator).toBe('0000');
await grantPremium(harness, account.userId, UserPremiumTypes.SUBSCRIPTION);
const response = await createBuilder<SessionRpcResponse>(harness, `Bearer ${Config.gateway.rpcSecret}`)
.post('/_rpc')
.body({
type: 'session',
token: account.token,
version: 1,
ip: '127.0.0.1',
})
.expect(HTTP_STATUS.OK)
.execute();
expect(response.type).toBe('session');
expect(response.data.user.id).toBe(account.userId);
expect(response.data.user.discriminator).not.toBe('0000');
});
// When an owner's bot carries an invalid '0000' discriminator, the owner's
// session RPC should sanitize the bot and set BOT_SANITIZED on the owner.
test('session RPC sanitizes owned bot discriminators and marks BOT_SANITIZED', async () => {
const owner = await createTestAccount(harness);
const bot = await createOAuth2BotApplication(harness, owner.token, `RPC owned bot sanitization ${Date.now()}`);
const ownerBefore = await createBuilder<{flags?: string}>(harness, '')
.get(`/test/users/${owner.userId}/data-exists`)
.expect(HTTP_STATUS.OK)
.execute();
const beforeFlags = BigInt(ownerBefore.flags ?? '0');
expect((beforeFlags & UserFlags.BOT_SANITIZED) === UserFlags.BOT_SANITIZED).toBe(false);
// Force the bot into the invalid '0000' state via the harness.
await createBuilder(harness, '')
.patch(`/test/users/${bot.botUserId}/discriminator`)
.body({discriminator: 0})
.expect(HTTP_STATUS.OK)
.execute();
await createBuilder(harness, '').post('/test/cache-clear').expect(HTTP_STATUS.OK).execute();
const botBefore = await createBuilder<{discriminator: string}>(harness, `Bot ${bot.botToken}`)
.get('/users/@me')
.expect(HTTP_STATUS.OK)
.execute();
expect(botBefore.discriminator).toBe('0000');
await createBuilder<SessionRpcResponse>(harness, `Bearer ${Config.gateway.rpcSecret}`)
.post('/_rpc')
.body({
type: 'session',
token: owner.token,
version: 1,
ip: '127.0.0.1',
})
.expect(HTTP_STATUS.OK)
.execute();
const botAfter = await createBuilder<{discriminator: string}>(harness, `Bot ${bot.botToken}`)
.get('/users/@me')
.expect(HTTP_STATUS.OK)
.execute();
expect(botAfter.discriminator).not.toBe('0000');
const ownerAfter = await createBuilder<{flags?: string}>(harness, '')
.get(`/test/users/${owner.userId}/data-exists`)
.expect(HTTP_STATUS.OK)
.execute();
const afterFlags = BigInt(ownerAfter.flags ?? '0');
expect((afterFlags & UserFlags.BOT_SANITIZED) === UserFlags.BOT_SANITIZED).toBe(true);
});
});

View File

@@ -25,7 +25,6 @@ import {snowflakeToDate} from '@fluxer/snowflake/src/Snowflake';
export interface GuildDiscoveryContext {
description: string | null;
categoryId: number | null;
onlineCount?: number;
}
export function convertToSearchableGuild(guild: Guild, discovery?: GuildDiscoveryContext): SearchableGuild {
@@ -43,11 +42,9 @@ export function convertToSearchableGuild(guild: Guild, discovery?: GuildDiscover
verificationLevel: guild.verificationLevel,
mfaLevel: guild.mfaLevel,
nsfwLevel: guild.nsfwLevel,
memberCount: guild.memberCount,
createdAt,
discoveryDescription: discovery?.description ?? null,
discoveryCategory: discovery?.categoryId ?? null,
isDiscoverable: guild.features.has(GuildFeatures.DISCOVERABLE),
onlineCount: discovery?.onlineCount ?? 0,
};
}

View File

@@ -18,7 +18,6 @@
*/
import {createTestAccount} from '@fluxer/api/src/auth/tests/AuthTestUtils';
import {Config} from '@fluxer/api/src/Config';
import {createGuild} from '@fluxer/api/src/guild/tests/GuildTestUtils';
import {ensureSessionStarted, markChannelAsIndexed, sendMessage} from '@fluxer/api/src/message/tests/MessageTestUtils';
import {type ApiTestHarness, createApiTestHarness} from '@fluxer/api/src/test/ApiTestHarness';
@@ -592,11 +591,8 @@ describe('Message Search Endpoint', () => {
const account = await createTestAccount(harness);
await ensureSessionStarted(harness, account.token);
await createBuilder<{type: 'session'; data: {private_channels: Array<{id: string}>}}>(
harness,
`Bearer ${Config.gateway.rpcSecret}`,
)
.post('/_rpc')
await createBuilder<{type: 'session'; data: {private_channels: Array<{id: string}>}}>(harness, '')
.post('/test/rpc-session-init')
.body({
type: 'session',
token: account.token,

View File

@@ -750,6 +750,12 @@ export class NoopGatewayService extends IGatewayService {
return new Map();
}
// Noop implementation: reports no member/online counts for any guild.
async getDiscoveryGuildCounts(
_guildIds: Array<GuildID>,
): Promise<Map<GuildID, {memberCount: number; onlineCount: number}>> {
return new Map();
}
async getNodeStats(): Promise<{
status: string;
sessions: number;

View File

@@ -3095,6 +3095,46 @@ export function TestHarnessController(app: HonoApp) {
return ctx.json(result, result.success ? 200 : 409);
});
// Harness-only route: patch a user's phone and/or email directly in the
// repository, bypassing verification flows. Fields omitted from the body are
// left untouched; explicit null clears the stored value.
app.post('/test/users/:userId/set-contact-info', async (ctx) => {
ensureHarnessAccess(ctx);
const params = ctx.req.param() as {userId?: string};
const userIdParam = params.userId;
if (!userIdParam) {
throw new Error('Missing userId parameter');
}
const userId = createUserID(BigInt(userIdParam));
const body = await ctx.req.json();
const {phone, email} = body as {phone?: string | null; email?: string | null};
const userRepository = new UserRepository();
const user = await userRepository.findUnique(userId);
if (!user) {
throw new UnknownUserError();
}
const updates: Record<string, unknown> = {};
if (phone !== undefined) {
updates['phone'] = phone;
}
if (email !== undefined) {
updates['email'] = email;
}
if (Object.keys(updates).length === 0) {
return ctx.json({success: true, updated: false});
}
await userRepository.patchUpsert(userId, updates, user.toRow());
return ctx.json({
success: true,
updated: true,
// NOTE(review): `?? ` makes an explicit-null update echo the PREVIOUS
// value here even though null was stored — confirm callers don't assert
// on the echoed value after clearing a field.
phone: updates['phone'] ?? user.phone,
email: updates['email'] ?? user.email,
});
});
app.post('/test/cache-clear', async (ctx) => {
ensureHarnessAccess(ctx);
const cacheService = ctx.get('cacheService');
@@ -3107,4 +3147,10 @@ export function TestHarnessController(app: HonoApp) {
Logger.info({totalDeleted}, 'Cleared KV cache via test harness');
return ctx.json({cleared: true, deleted_count: totalDeleted});
});
app.post('/test/rpc-session-init', async (ctx) => {
const request = await ctx.req.json();
const response = await ctx.get('rpcService').handleRpcRequest({request, requestCache: ctx.get('requestCache')});
return ctx.json(response);
});
}

View File

@@ -21,7 +21,6 @@ import {clearSqliteStore} from '@fluxer/api/src/database/SqliteKV';
import {Logger} from '@fluxer/api/src/Logger';
import {resetSearchServices} from '@fluxer/api/src/SearchFactory';
import type {IKVProvider} from '@fluxer/kv_client/src/IKVProvider';
import type {QueueEngine} from '@fluxer/queue/src/engine/QueueEngine';
import type {S3Service} from '@fluxer/s3/src/s3/S3Service';
export type TestHarnessResetHandler = () => Promise<void>;
@@ -41,7 +40,6 @@ export async function resetTestHarnessState(): Promise<void> {
interface CreateTestHarnessResetOptions {
kvProvider?: IKVProvider;
queueEngine?: QueueEngine;
s3Service?: S3Service;
}
@@ -59,11 +57,6 @@ export function createTestHarnessResetHandler(options: CreateTestHarnessResetOpt
}
}
if (options.queueEngine) {
Logger.info('Resetting queue engine');
await options.queueEngine.resetState();
}
if (options.s3Service) {
Logger.info('Wiping S3 storage');
await options.s3Service.clearAll();

View File

@@ -0,0 +1,60 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {GatewayRpcMethodError} from '@fluxer/api/src/infrastructure/GatewayRpcError';
import type {IGatewayRpcTransport} from '@fluxer/api/src/infrastructure/IGatewayRpcTransport';
import {vi} from 'vitest';
// Test double for IGatewayRpcTransport. Per-method results or error codes are
// configured via setMethodResult/setMethodError (mutually exclusive per
// method: setting one clears the other). Unconfigured methods resolve to {}.
export class MockGatewayRpcTransport implements IGatewayRpcTransport {
private methodErrors = new Map<string, string>();
private methodResults = new Map<string, unknown>();
call = vi.fn(async (method: string, _params: Record<string, unknown>): Promise<unknown> => {
const errorCode = this.methodErrors.get(method);
if (errorCode !== undefined) {
throw new GatewayRpcMethodError(errorCode);
}
// NOTE(review): a result explicitly set to `undefined` is indistinguishable
// from "not configured" and falls through to the {} default.
const result = this.methodResults.get(method);
if (result !== undefined) {
return result;
}
return {};
});
destroy = vi.fn(async (): Promise<void> => {});
// Make `method` throw GatewayRpcMethodError(errorCode) on every call.
setMethodError(method: string, errorCode: string): void {
this.methodErrors.set(method, errorCode);
this.methodResults.delete(method);
}
// Make `method` resolve with `result` on every call.
setMethodResult(method: string, result: unknown): void {
this.methodResults.set(method, result);
this.methodErrors.delete(method);
}
// Drop all configuration and clear spy call history.
reset(): void {
this.methodErrors.clear();
this.methodResults.clear();
this.call.mockClear();
this.destroy.mockClear();
}
}

View File

@@ -263,6 +263,11 @@ export class MockGatewayService implements IGatewayService {
// Mock implementation: reports no online counts for any guild.
async getDiscoveryOnlineCounts(_guildIds: Array<GuildID>): Promise<Map<GuildID, number>> {
return new Map();
}
// Mock implementation: reports no member/online counts for any guild.
async getDiscoveryGuildCounts(
_guildIds: Array<GuildID>,
): Promise<Map<GuildID, {memberCount: number; onlineCount: number}>> {
return new Map();
}
async getNodeStats(): Promise<{
status: string;
sessions: number;

View File

@@ -17,6 +17,7 @@
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import crypto from 'node:crypto';
import type {Readable} from 'node:stream';
import {S3ServiceException} from '@aws-sdk/client-s3';
import type {IStorageService} from '@fluxer/api/src/infrastructure/IStorageService';
@@ -33,6 +34,7 @@ export interface MockStorageServiceConfig {
export class MockStorageService implements IStorageService {
private objects: Map<string, {data: Uint8Array; contentType?: string}> = new Map();
private multipartUploads: Map<string, {parts: Map<number, Uint8Array>; key: string; bucket: string}> = new Map();
private deletedObjects: Array<{bucket: string; key: string}> = [];
private copiedObjects: Array<{
sourceBucket: string;
@@ -56,6 +58,10 @@ export class MockStorageService implements IStorageService {
readonly deleteAvatarSpy = vi.fn();
readonly listObjectsSpy = vi.fn();
readonly deleteObjectsSpy = vi.fn();
readonly createMultipartUploadSpy = vi.fn();
readonly uploadPartSpy = vi.fn();
readonly completeMultipartUploadSpy = vi.fn();
readonly abortMultipartUploadSpy = vi.fn();
private config: MockStorageServiceConfig;
@@ -227,6 +233,62 @@ export class MockStorageService implements IStorageService {
this.deleteObjectsSpy(_params);
}
// Starts an in-memory multipart upload and returns a random uploadId.
// NOTE(review): contentType is only recorded via the spy, not persisted on
// the upload or the completed object — confirm no test relies on it.
async createMultipartUpload(params: {
bucket: string;
key: string;
contentType?: string;
}): Promise<{uploadId: string}> {
this.createMultipartUploadSpy(params);
const uploadId = crypto.randomUUID();
this.multipartUploads.set(uploadId, {parts: new Map(), key: params.key, bucket: params.bucket});
return {uploadId};
}
// Stores one part's bytes under its part number; re-uploading a part number
// overwrites the previous bytes. Returns a deterministic fake etag.
async uploadPart(params: {
bucket: string;
key: string;
uploadId: string;
partNumber: number;
body: Uint8Array;
}): Promise<{etag: string}> {
this.uploadPartSpy(params);
const upload = this.multipartUploads.get(params.uploadId);
if (!upload) {
throw new Error(`Mock: multipart upload ${params.uploadId} not found`);
}
upload.parts.set(params.partNumber, params.body);
const etag = `"etag-${params.partNumber}"`;
return {etag};
}
// Concatenates all uploaded parts in part-number order into a single object
// stored under the upload's key, then discards the in-flight upload.
// NOTE(review): the `parts` argument (part numbers + etags) is ignored —
// every uploaded part is included regardless of what the caller lists.
async completeMultipartUpload(params: {
bucket: string;
key: string;
uploadId: string;
parts: Array<{partNumber: number; etag: string}>;
}): Promise<void> {
this.completeMultipartUploadSpy(params);
const upload = this.multipartUploads.get(params.uploadId);
if (!upload) {
throw new Error(`Mock: multipart upload ${params.uploadId} not found`);
}
const sortedParts = [...upload.parts.entries()].sort(([a], [b]) => a - b);
const totalSize = sortedParts.reduce((sum, [, data]) => sum + data.length, 0);
const combined = new Uint8Array(totalSize);
let offset = 0;
for (const [, data] of sortedParts) {
combined.set(data, offset);
offset += data.length;
}
this.objects.set(upload.key, {data: combined});
this.multipartUploads.delete(params.uploadId);
}
// Discards an in-flight upload and its parts; aborting an unknown uploadId is a no-op.
async abortMultipartUpload(params: {bucket: string; key: string; uploadId: string}): Promise<void> {
this.abortMultipartUploadSpy(params);
this.multipartUploads.delete(params.uploadId);
}
// Returns a defensive copy of the recorded deletions for test assertions.
getDeletedObjects(): Array<{bucket: string; key: string}> {
return [...this.deletedObjects];
}
@@ -246,6 +308,7 @@ export class MockStorageService implements IStorageService {
reset(): void {
this.objects.clear();
this.multipartUploads.clear();
this.deletedObjects = [];
this.copiedObjects = [];
this.config = {};
@@ -264,5 +327,9 @@ export class MockStorageService implements IStorageService {
this.deleteAvatarSpy.mockClear();
this.listObjectsSpy.mockClear();
this.deleteObjectsSpy.mockClear();
this.createMultipartUploadSpy.mockClear();
this.uploadPartSpy.mockClear();
this.completeMultipartUploadSpy.mockClear();
this.abortMultipartUploadSpy.mockClear();
}
}

View File

@@ -1,141 +0,0 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {Config} from '@fluxer/api/src/Config';
import {HttpResponse, http} from 'msw';
// Wire shape of a gateway RPC call as posted to `${rpcEndpoint}/_rpc`.
interface GatewayRpcRequest {
method: string;
params: Record<string, unknown>;
}
// Wire shape of the RPC reply: exactly one of result/error is expected.
interface GatewayRpcResponse {
result?: unknown;
error?: string;
}
// Snapshot of the last intercepted RPC call, for test assertions.
export interface GatewayRpcRequestCapture {
method: string;
params: Record<string, unknown>;
authorization?: string;
}
// Maps RPC method name -> mocked result (or an Error to simulate failure).
export type GatewayRpcMockResponses = Map<string, unknown>;
/**
 * Builds an msw handler for the gateway `/_rpc` endpoint. Each call is looked
 * up in `mockResponses` by method name: a missing entry yields a 500 "no mock
 * configured" error, an Error instance yields a 500 with its message, any
 * other value is returned as the RPC result. When `requestCapture` is given,
 * the latest call's method/params/authorization header is recorded into it.
 */
export function createGatewayRpcHandler(
mockResponses: GatewayRpcMockResponses = new Map(),
requestCapture?: {current: GatewayRpcRequestCapture | null},
) {
const endpoint = `${Config.gateway.rpcEndpoint}/_rpc`;
return http.post(endpoint, async ({request}) => {
const body = (await request.json()) as GatewayRpcRequest;
const authorization = request.headers.get('authorization') ?? undefined;
if (requestCapture) {
requestCapture.current = {
method: body.method,
params: body.params,
authorization,
};
}
// NOTE(review): a mock result explicitly set to `undefined` is treated the
// same as "no mock configured" and produces the 500 below.
const mockResult = mockResponses.get(body.method);
if (mockResult === undefined) {
const errorResponse: GatewayRpcResponse = {
error: `No mock configured for method: ${body.method}`,
};
return HttpResponse.json(errorResponse, {status: 500});
}
if (mockResult instanceof Error) {
const errorResponse: GatewayRpcResponse = {
error: mockResult.message,
};
return HttpResponse.json(errorResponse, {status: 500});
}
const response: GatewayRpcResponse = {
result: mockResult,
};
return HttpResponse.json(response);
});
}
export function createGatewayRpcErrorHandler(status: number, errorMessage: string) {
const endpoint = `${Config.gateway.rpcEndpoint}/_rpc`;
return http.post(endpoint, () => {
const response: GatewayRpcResponse = {
error: errorMessage,
};
return HttpResponse.json(response, {status});
});
}
/**
 * Builds an msw handler that fails a single RPC `method` with a 500 carrying
 * `errorMessage`, while every other method succeeds with an empty result.
 */
export function createGatewayRpcMethodErrorHandler(method: string, errorMessage: string) {
  const endpoint = `${Config.gateway.rpcEndpoint}/_rpc`;
  return http.post(endpoint, async ({request}) => {
    const {method: calledMethod} = (await request.json()) as GatewayRpcRequest;
    if (calledMethod !== method) {
      return HttpResponse.json({result: {}});
    }
    const failure: GatewayRpcResponse = {error: errorMessage};
    return HttpResponse.json(failure, {status: 500});
  });
}
/**
 * Builds an msw handler that answers successive calls to `method` with the
 * entries of `responses` in order; once exhausted, the last entry repeats.
 * Calls for any other method succeed with an empty result.
 */
export function createGatewayRpcSequenceHandler(
  method: string,
  responses: Array<{result?: unknown; error?: string; status?: number}>,
) {
  const endpoint = `${Config.gateway.rpcEndpoint}/_rpc`;
  let callCount = 0;
  return http.post(endpoint, async ({request}) => {
    const body = (await request.json()) as GatewayRpcRequest;
    if (body.method !== method) {
      return HttpResponse.json({result: {}});
    }
    const responseConfig = responses[callCount] ?? responses[responses.length - 1];
    callCount++;
    // Fix: with an empty `responses` array both lookups above yield
    // undefined and the original code threw a TypeError on `.error`;
    // surface a configuration error instead.
    if (responseConfig === undefined) {
      const errorResponse: GatewayRpcResponse = {
        error: `No sequenced responses configured for method: ${method}`,
      };
      return HttpResponse.json(errorResponse, {status: 500});
    }
    if (responseConfig.error) {
      const errorResponse: GatewayRpcResponse = {
        error: responseConfig.error,
      };
      return HttpResponse.json(errorResponse, {status: responseConfig.status ?? 500});
    }
    const response: GatewayRpcResponse = {
      result: responseConfig.result,
    };
    return HttpResponse.json(response, {status: responseConfig.status ?? 200});
  });
}

View File

@@ -31,6 +31,7 @@ import type {IBlueskyOAuthService} from '@fluxer/api/src/bluesky/IBlueskyOAuthSe
import type {IChannelRepository} from '@fluxer/api/src/channel/IChannelRepository';
import type {ChannelRequestService} from '@fluxer/api/src/channel/services/ChannelRequestService';
import type {ChannelService} from '@fluxer/api/src/channel/services/ChannelService';
import type {ChunkedUploadService} from '@fluxer/api/src/channel/services/ChunkedUploadService';
import type {MessageRequestService} from '@fluxer/api/src/channel/services/message/MessageRequestService';
import type {ScheduledMessageService} from '@fluxer/api/src/channel/services/ScheduledMessageService';
import type {StreamPreviewService} from '@fluxer/api/src/channel/services/StreamPreviewService';
@@ -95,7 +96,7 @@ import type {UserContactChangeLogService} from '@fluxer/api/src/user/services/Us
import type {UserContentRequestService} from '@fluxer/api/src/user/services/UserContentRequestService';
import type {UserRelationshipRequestService} from '@fluxer/api/src/user/services/UserRelationshipRequestService';
import type {UserService} from '@fluxer/api/src/user/services/UserService';
import type {SendGridWebhookService} from '@fluxer/api/src/webhook/SendGridWebhookService';
import type {SweegoWebhookService} from '@fluxer/api/src/webhook/SweegoWebhookService';
import type {WebhookRequestService} from '@fluxer/api/src/webhook/WebhookRequestService';
import type {WebhookService} from '@fluxer/api/src/webhook/WebhookService';
import type {ICacheService} from '@fluxer/cache/src/ICacheService';
@@ -133,6 +134,7 @@ export interface HonoEnv {
cacheService: ICacheService;
channelService: ChannelService;
channelRequestService: ChannelRequestService;
chunkedUploadService: ChunkedUploadService;
messageRequestService: MessageRequestService;
channelRepository: IChannelRepository;
connectionService: ConnectionService;
@@ -188,7 +190,7 @@ export interface HonoEnv {
userChannelRequestService: UserChannelRequestService;
userContentRequestService: UserContentRequestService;
userRelationshipRequestService: UserRelationshipRequestService;
sendGridWebhookService: SendGridWebhookService;
sweegoWebhookService: SweegoWebhookService;
webhookService: WebhookService;
webhookRequestService: WebhookRequestService;
workerService: IWorkerService;

View File

@@ -36,6 +36,43 @@ export class TenorResolver extends BaseResolver {
/**
 * Builds gifv embeds for a Tenor page.
 *
 * Prefers a direct GIF advertised via the page's og:image meta tag; when
 * that is absent, not a .gif, or fails to resolve, falls back to the
 * page's JSON-LD block.
 */
async resolve(url: URL, content: Uint8Array, isNSFWAllowed: boolean = false): Promise<Array<MessageEmbedResponse>> {
  const html = Buffer.from(content).toString('utf-8');
  const document = parseDocument(html);
  const ogEmbed = await this.resolveFromOgImage(url, document, isNSFWAllowed);
  return ogEmbed ? [ogEmbed] : this.resolveFromJsonLd(url, document, isNSFWAllowed);
}
private async resolveFromOgImage(
url: URL,
document: Document,
isNSFWAllowed: boolean,
): Promise<MessageEmbedResponse | null> {
const ogImageUrl = this.extractMetaContent(document, 'og:image');
if (!ogImageUrl || !this.isGifUrl(ogImageUrl)) {
return null;
}
const thumbnail = await this.resolveMediaURL(url, ogImageUrl, isNSFWAllowed);
if (!thumbnail) {
return null;
}
return {
type: 'gifv',
url: url.href,
provider: {name: 'Tenor', url: 'https://tenor.com'},
thumbnail,
};
}
private async resolveFromJsonLd(
url: URL,
document: Document,
isNSFWAllowed: boolean,
): Promise<Array<MessageEmbedResponse>> {
const jsonLdContent = this.extractJsonLdContent(document);
if (!jsonLdContent) {
return [];
@@ -53,6 +90,20 @@ export class TenorResolver extends BaseResolver {
return [embed];
}
/** Reads the `content` attribute of a `<meta property="...">` tag, if any. */
private extractMetaContent(document: Document, property: string): string | null {
  const selector = `meta[property="${property}"]`;
  const tag = selectOne(selector, document) as Element | null;
  if (!tag) {
    return null;
  }
  return tag.attribs['content'] ?? null;
}
/**
 * Checks whether a URL's path ends in ".gif" (case-insensitive).
 * When the input is not a parseable absolute URL, falls back to testing
 * the raw string itself.
 */
private isGifUrl(url: string): boolean {
  let candidate: string;
  try {
    // Use the pathname so query strings/fragments don't defeat the check.
    candidate = new URL(url).pathname;
  } catch {
    candidate = url;
  }
  return candidate.toLowerCase().endsWith('.gif');
}
private extractJsonLdContent(document: Document): TenorJsonLd | null {
const scriptElement = selectOne('script.dynamic[type="application/ld+json"]', document) as Element | null;
if (scriptElement && scriptElement.children.length > 0) {

View File

@@ -19,9 +19,10 @@
import {TenorResolver} from '@fluxer/api/src/unfurler/resolvers/TenorResolver';
import {createMockContent, MockMediaService} from '@fluxer/api/src/unfurler/tests/ResolverTestUtils';
import {EmbedMediaFlags} from '@fluxer/constants/src/ChannelConstants';
import {afterEach, beforeEach, describe, expect, it} from 'vitest';
function createTenorHtml(options: {thumbnailUrl?: string; videoUrl?: string}): string {
function createTenorHtml(options: {thumbnailUrl?: string; videoUrl?: string; ogImage?: string}): string {
const jsonLd: Record<string, unknown> = {};
if (options.thumbnailUrl) {
@@ -31,9 +32,15 @@ function createTenorHtml(options: {thumbnailUrl?: string; videoUrl?: string}): s
jsonLd.video = {contentUrl: options.videoUrl};
}
const metaTags: Array<string> = [];
if (options.ogImage) {
metaTags.push(`<meta property="og:image" content="${options.ogImage}" />`);
}
return `<!DOCTYPE html>
<html>
<head>
${metaTags.join('\n')}
<script class="dynamic" type="application/ld+json">
${JSON.stringify(jsonLd)}
</script>
@@ -100,7 +107,51 @@ describe('TenorResolver', () => {
});
describe('resolve', () => {
it('returns gifv embed with thumbnail and video', async () => {
it('prefers og:image GIF URL over JSON-LD video', async () => {
const url = new URL('https://tenor.com/view/cat-gif-12345');
const html = createTenorHtml({
ogImage: 'https://media.tenor.com/cat-gif-AAAAC/cat.gif',
thumbnailUrl: 'https://media.tenor.com/thumbnail.png',
videoUrl: 'https://media.tenor.com/video.mp4',
});
mediaService.setMetadata('https://media.tenor.com/cat-gif-AAAAC/cat.gif', {
content_type: 'image/gif',
animated: true,
width: 320,
height: 240,
});
const embeds = await resolver.resolve(url, createMockContent(html));
expect(embeds).toHaveLength(1);
expect(embeds[0]!.type).toBe('gifv');
expect(embeds[0]!.url).toBe('https://tenor.com/view/cat-gif-12345');
expect(embeds[0]!.provider).toEqual({name: 'Tenor', url: 'https://tenor.com'});
expect(embeds[0]!.thumbnail).toBeDefined();
expect(embeds[0]!.thumbnail!.url).toBe('https://media.tenor.com/cat-gif-AAAAC/cat.gif');
expect(embeds[0]!.thumbnail!.content_type).toBe('image/gif');
expect(embeds[0]!.thumbnail!.flags).toBe(EmbedMediaFlags.IS_ANIMATED);
expect(embeds[0]!.video).toBeUndefined();
});
it('falls back to JSON-LD when og:image is not a GIF', async () => {
const url = new URL('https://tenor.com/view/cat-gif-12345');
const html = createTenorHtml({
ogImage: 'https://media.tenor.com/thumbnail.png',
thumbnailUrl: 'https://media.tenor.com/thumbnail.png',
videoUrl: 'https://media.tenor.com/video.mp4',
});
const embeds = await resolver.resolve(url, createMockContent(html));
expect(embeds).toHaveLength(1);
expect(embeds[0]!.type).toBe('gifv');
expect(embeds[0]!.thumbnail).toBeDefined();
expect(embeds[0]!.video).toBeDefined();
});
it('falls back to JSON-LD when og:image is absent', async () => {
const url = new URL('https://tenor.com/view/cat-gif-12345');
const html = createTenorHtml({
thumbnailUrl: 'https://media.tenor.com/thumbnail.png',
@@ -111,11 +162,29 @@ describe('TenorResolver', () => {
expect(embeds).toHaveLength(1);
expect(embeds[0]!.type).toBe('gifv');
expect(embeds[0]!.url).toBe('https://tenor.com/view/cat-gif-12345');
expect(embeds[0]!.provider).toEqual({name: 'Tenor', url: 'https://tenor.com'});
expect(embeds[0]!.thumbnail).toBeDefined();
expect(embeds[0]!.video).toBeDefined();
});
it('handles tenor page with only thumbnail', async () => {
it('falls back to JSON-LD when og:image GIF fails to resolve', async () => {
const url = new URL('https://tenor.com/view/cat-gif-12345');
const html = createTenorHtml({
ogImage: 'https://media.tenor.com/broken.gif',
thumbnailUrl: 'https://media.tenor.com/thumbnail.png',
videoUrl: 'https://media.tenor.com/video.mp4',
});
mediaService.markAsFailing('https://media.tenor.com/broken.gif');
const embeds = await resolver.resolve(url, createMockContent(html));
expect(embeds).toHaveLength(1);
expect(embeds[0]!.type).toBe('gifv');
expect(embeds[0]!.thumbnail).toBeDefined();
expect(embeds[0]!.video).toBeDefined();
});
it('handles tenor page with only thumbnail in JSON-LD', async () => {
const url = new URL('https://tenor.com/view/cat-gif-12345');
const html = createTenorHtml({
thumbnailUrl: 'https://media.tenor.com/thumbnail.png',
@@ -128,7 +197,7 @@ describe('TenorResolver', () => {
expect(embeds[0]!.thumbnail).toBeDefined();
});
it('handles tenor page with only video', async () => {
it('handles tenor page with only video in JSON-LD', async () => {
const url = new URL('https://tenor.com/view/cat-gif-12345');
const html = createTenorHtml({
videoUrl: 'https://media.tenor.com/video.mp4',
@@ -141,7 +210,7 @@ describe('TenorResolver', () => {
expect(embeds[0]!.video).toBeDefined();
});
it('returns empty array when no JSON-LD found', async () => {
it('returns empty array when no JSON-LD found and no og:image', async () => {
const url = new URL('https://tenor.com/view/cat-gif-12345');
const html = '<!DOCTYPE html><html><head></head><body></body></html>';
@@ -150,7 +219,7 @@ describe('TenorResolver', () => {
expect(embeds).toHaveLength(0);
});
it('returns empty array when JSON-LD is empty', async () => {
it('returns empty array when JSON-LD is empty and no og:image', async () => {
const url = new URL('https://tenor.com/view/cat-gif-12345');
const html = `<!DOCTYPE html>
<html>
@@ -167,7 +236,7 @@ describe('TenorResolver', () => {
expect(embeds).toHaveLength(1);
});
it('returns empty array for invalid JSON-LD', async () => {
it('returns empty array for invalid JSON-LD and no og:image', async () => {
const url = new URL('https://tenor.com/view/cat-gif-12345');
const html = `<!DOCTYPE html>
<html>
@@ -184,25 +253,33 @@ describe('TenorResolver', () => {
expect(embeds).toHaveLength(0);
});
it('handles NSFW content flag when allowed', async () => {
it('handles NSFW content with og:image GIF', async () => {
const url = new URL('https://tenor.com/view/adult-gif-12345');
const html = createTenorHtml({
thumbnailUrl: 'https://media.tenor.com/nsfw-thumbnail.png',
videoUrl: 'https://media.tenor.com/nsfw-video.mp4',
ogImage: 'https://media.tenor.com/nsfw.gif',
});
mediaService.markAsNsfw('https://media.tenor.com/nsfw-thumbnail.png');
mediaService.markAsNsfw('https://media.tenor.com/nsfw-video.mp4');
mediaService.markAsNsfw('https://media.tenor.com/nsfw.gif');
mediaService.setMetadata('https://media.tenor.com/nsfw.gif', {
content_type: 'image/gif',
animated: true,
});
const embeds = await resolver.resolve(url, createMockContent(html), true);
expect(embeds).toHaveLength(1);
expect(embeds[0]!.thumbnail!.flags).toBe(EmbedMediaFlags.IS_ANIMATED | EmbedMediaFlags.CONTAINS_EXPLICIT_MEDIA);
});
it('preserves URL in embed output', async () => {
const url = new URL('https://tenor.com/view/special-chars-gif%20test-12345');
const html = createTenorHtml({
thumbnailUrl: 'https://media.tenor.com/thumbnail.png',
ogImage: 'https://media.tenor.com/test.gif',
});
mediaService.setMetadata('https://media.tenor.com/test.gif', {
content_type: 'image/gif',
animated: true,
});
const embeds = await resolver.resolve(url, createMockContent(html));
@@ -210,11 +287,12 @@ describe('TenorResolver', () => {
expect(embeds[0]!.url).toBe('https://tenor.com/view/special-chars-gif%20test-12345');
});
it('handles missing dynamic class on script tag', async () => {
it('handles missing dynamic class on script tag with og:image', async () => {
const url = new URL('https://tenor.com/view/cat-gif-12345');
const html = `<!DOCTYPE html>
<html>
<head>
<meta property="og:image" content="https://media.tenor.com/cat.gif" />
<script type="application/ld+json">
{"image": {"thumbnailUrl": "https://media.tenor.com/thumbnail.png"}}
</script>
@@ -222,9 +300,62 @@ describe('TenorResolver', () => {
<body></body>
</html>`;
mediaService.setMetadata('https://media.tenor.com/cat.gif', {
content_type: 'image/gif',
animated: true,
});
const embeds = await resolver.resolve(url, createMockContent(html));
expect(embeds).toHaveLength(0);
expect(embeds).toHaveLength(1);
expect(embeds[0]!.thumbnail!.url).toBe('https://media.tenor.com/cat.gif');
expect(embeds[0]!.video).toBeUndefined();
});
it('handles og:image with uppercase .GIF extension', async () => {
const url = new URL('https://tenor.com/view/cat-gif-12345');
const html = createTenorHtml({
ogImage: 'https://media.tenor.com/cat.GIF',
});
mediaService.setMetadata('https://media.tenor.com/cat.GIF', {
content_type: 'image/gif',
animated: true,
});
const embeds = await resolver.resolve(url, createMockContent(html));
expect(embeds).toHaveLength(1);
expect(embeds[0]!.thumbnail!.url).toBe('https://media.tenor.com/cat.GIF');
expect(embeds[0]!.video).toBeUndefined();
});
it('resolves og:image GIF with only og:image present (no JSON-LD)', async () => {
const url = new URL('https://tenor.com/view/cat-gif-12345');
const html = `<!DOCTYPE html>
<html>
<head>
<meta property="og:image" content="https://media.tenor.com/cat.gif" />
</head>
<body></body>
</html>`;
mediaService.setMetadata('https://media.tenor.com/cat.gif', {
content_type: 'image/gif',
animated: true,
width: 500,
height: 400,
});
const embeds = await resolver.resolve(url, createMockContent(html));
expect(embeds).toHaveLength(1);
expect(embeds[0]!.type).toBe('gifv');
expect(embeds[0]!.thumbnail).toBeDefined();
expect(embeds[0]!.thumbnail!.url).toBe('https://media.tenor.com/cat.gif');
expect(embeds[0]!.thumbnail!.width).toBe(500);
expect(embeds[0]!.thumbnail!.height).toBe(400);
expect(embeds[0]!.video).toBeUndefined();
});
});
});

View File

@@ -155,7 +155,7 @@ export function mapUserToPrivateResponse(user: User): UserPrivateResponse {
return {
...partialResponse,
flags: Number((user.flags ?? 0n) & PUBLIC_USER_FLAGS_WITHOUT_STAFF),
flags: mapUserFlagsToPublicBitfield(user),
is_staff: isStaff,
acls: Array.from(user.acls),
traits,

View File

@@ -127,9 +127,6 @@ export class UserDataRepository {
const result = await executeVersionedUpdate<UserRow, 'user_id'>(
async () => {
if (oldData !== undefined) {
return oldData;
}
const user = await this.findUnique(userId);
return user?.toRow() ?? null;
},
@@ -138,6 +135,7 @@ export class UserDataRepository {
patch: buildPatchFromData(data, current, USER_COLUMNS, ['user_id']),
}),
Users,
{initialData: oldData},
);
return {finalVersion: result.finalVersion};
@@ -146,9 +144,6 @@ export class UserDataRepository {
async patchUser(userId: UserID, patch: UserPatch, oldData?: UserRow | null): Promise<{finalVersion: number | null}> {
const result = await executeVersionedUpdate<UserRow, 'user_id'>(
async () => {
if (oldData !== undefined) {
return oldData;
}
const user = await this.findUnique(userId);
return user?.toRow() ?? null;
},
@@ -157,6 +152,7 @@ export class UserDataRepository {
patch,
}),
Users,
{initialData: oldData},
);
return {finalVersion: result.finalVersion};

View File

@@ -0,0 +1,161 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {createTestAccount} from '@fluxer/api/src/auth/tests/AuthTestUtils';
import {
acceptInvite,
createChannel,
createChannelInvite,
createGuild,
} from '@fluxer/api/src/guild/tests/GuildTestUtils';
import {ensureSessionStarted, sendMessage} from '@fluxer/api/src/message/tests/MessageTestUtils';
import {type ApiTestHarness, createApiTestHarness} from '@fluxer/api/src/test/ApiTestHarness';
import {HTTP_STATUS} from '@fluxer/api/src/test/TestConstants';
import {createBuilder, createBuilderWithoutAuth} from '@fluxer/api/src/test/TestRequestBuilder';
import {
deleteAccount,
setPendingDeletionAt,
triggerDeletionWorker,
waitForDeletionCompletion,
} from '@fluxer/api/src/user/tests/UserTestUtils';
import type {MessageResponse} from '@fluxer/schema/src/domains/message/MessageResponseSchemas';
import type {UserPartialResponse} from '@fluxer/schema/src/domains/user/UserResponseSchemas';
import {afterEach, beforeEach, describe, expect, test} from 'vitest';
// End-to-end coverage for how deleted accounts surface in message payloads:
// mentions, authors, and direct lookups must degrade to a stable
// "DeletedUser" placeholder instead of erroring or leaking stale data.
describe('Account Delete Mention Resolution', () => {
let harness: ApiTestHarness;
beforeEach(async () => {
harness = await createApiTestHarness();
});
afterEach(async () => {
await harness?.shutdown();
});
// Deleting the mentioned user must not break message reads for others.
test('messages mentioning a deleted user remain readable', async () => {
const alice = await createTestAccount(harness);
const bob = await createTestAccount(harness);
const guild = await createGuild(harness, alice.token, 'Mention Test Guild');
// Some guilds are created without a system channel; fall back to a fresh one.
let channelId = guild.system_channel_id;
if (!channelId) {
const channel = await createChannel(harness, alice.token, guild.id, 'general');
channelId = channel.id;
}
const invite = await createChannelInvite(harness, alice.token, channelId);
await acceptInvite(harness, bob.token, invite.code);
await ensureSessionStarted(harness, alice.token);
const mentionMessage = await sendMessage(harness, alice.token, channelId, `Hello <@${bob.userId}>`);
expect(mentionMessage.mentions).toBeDefined();
expect(mentionMessage.mentions!.length).toBe(1);
// Request deletion, then backdate the pending-deletion timestamp so the
// worker treats the account as due for deletion immediately.
await deleteAccount(harness, bob.token, bob.password);
const past = new Date();
past.setMinutes(past.getMinutes() - 1);
await setPendingDeletionAt(harness, bob.userId, past);
await triggerDeletionWorker(harness);
await waitForDeletionCompletion(harness, bob.userId);
// Clear caches so the read below reflects post-deletion state, not cached rows.
await createBuilderWithoutAuth(harness).post('/test/cache-clear').expect(HTTP_STATUS.OK).execute();
const messages = await createBuilder<Array<MessageResponse>>(harness, alice.token)
.get(`/channels/${channelId}/messages?limit=50`)
.expect(HTTP_STATUS.OK)
.execute();
const mentionMsg = messages.find((m) => m.id === mentionMessage.id);
expect(mentionMsg).toBeDefined();
expect(mentionMsg!.mentions).toBeDefined();
expect(mentionMsg!.mentions!.length).toBe(1);
// The mention entry keeps Bob's id, but every profile field is the
// deleted-user placeholder value.
const mention = mentionMsg!.mentions![0];
expect(mention.id).toBe(bob.userId);
expect(mention.username).toBe('DeletedUser');
expect(mention.discriminator).toBe('0000');
expect(mention.global_name).toBe('Deleted User');
expect(mention.avatar).toBeNull();
// The (still live) author must not be affected by the fallback.
expect(mentionMsg!.author.id).toBe(alice.userId);
expect(mentionMsg!.author.username).not.toBe('DeletedUser');
}, 60_000);
// Authors carrying the DELETED security flag (without the full deletion
// worker run) also resolve to the placeholder profile.
test('message author resolution works for users with DELETED flag', async () => {
const account = await createTestAccount(harness);
const guild = await createGuild(harness, account.token, 'Author Test Guild');
let channelId = guild.system_channel_id;
if (!channelId) {
const channel = await createChannel(harness, account.token, guild.id, 'general');
channelId = channel.id;
}
await ensureSessionStarted(harness, account.token);
const sentMessage = await sendMessage(harness, account.token, channelId, 'Hello world');
const viewer = await createTestAccount(harness);
const invite = await createChannelInvite(harness, account.token, channelId);
await acceptInvite(harness, viewer.token, invite.code);
// Flip the DELETED security flag directly via the test-only endpoint.
await createBuilderWithoutAuth(harness)
.post(`/test/users/${account.userId}/security-flags`)
.body({set_flags: ['DELETED']})
.expect(HTTP_STATUS.OK)
.execute();
await createBuilderWithoutAuth(harness).post('/test/cache-clear').expect(HTTP_STATUS.OK).execute();
const messages = await createBuilder<Array<MessageResponse>>(harness, viewer.token)
.get(`/channels/${channelId}/messages?limit=50`)
.expect(HTTP_STATUS.OK)
.execute();
const msg = messages.find((m) => m.id === sentMessage.id);
expect(msg).toBeDefined();
expect(msg!.author.username).toBe('DeletedUser');
expect(msg!.author.discriminator).toBe('0000');
expect(msg!.author.global_name).toBe('Deleted User');
expect(msg!.author.avatar).toBeNull();
}, 60_000);
// GET /users/:id for a fully deleted account returns 200 with placeholder data
// rather than a 404.
test('direct user lookup returns deleted user fallback for deleted users', async () => {
const alice = await createTestAccount(harness);
const bob = await createTestAccount(harness);
await deleteAccount(harness, bob.token, bob.password);
const past = new Date();
past.setMinutes(past.getMinutes() - 1);
await setPendingDeletionAt(harness, bob.userId, past);
await triggerDeletionWorker(harness);
await waitForDeletionCompletion(harness, bob.userId);
const user = await createBuilder<UserPartialResponse>(harness, alice.token)
.get(`/users/${bob.userId}`)
.expect(HTTP_STATUS.OK)
.execute();
expect(user.id).toBe(bob.userId);
expect(user.username).toBe('DeletedUser');
expect(user.discriminator).toBe('0000');
expect(user.global_name).toBe('Deleted User');
expect(user.avatar).toBeNull();
}, 60_000);
});

View File

@@ -83,13 +83,24 @@ describe('User Account And Settings', () => {
expect(Object.keys(preloadData).length).toBe(0);
});
test('reject getting nonexistent user', async () => {
test('nonexistent user returns deleted user fallback', async () => {
const account = await createTestAccount(harness);
await createBuilder(harness, account.token)
const user = await createBuilder<{
id: string;
username: string;
discriminator: string;
global_name: string | null;
avatar: string | null;
}>(harness, account.token)
.get(`/users/${TEST_IDS.NONEXISTENT_USER}`)
.expect(HTTP_STATUS.NOT_FOUND)
.expect(HTTP_STATUS.OK)
.execute();
expect(user.id).toBe(TEST_IDS.NONEXISTENT_USER);
expect(user.username).toBe('DeletedUser');
expect(user.discriminator).toBe('0000');
expect(user.avatar).toBeNull();
});
test('reject getting nonexistent user profile', async () => {

View File

@@ -0,0 +1,131 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {createTestAccount} from '@fluxer/api/src/auth/tests/AuthTestUtils';
import {type ApiTestHarness, createApiTestHarness} from '@fluxer/api/src/test/ApiTestHarness';
import {HTTP_STATUS} from '@fluxer/api/src/test/TestConstants';
import {createBuilder} from '@fluxer/api/src/test/TestRequestBuilder';
import {fetchUser, fetchUserMe, updateUserProfile} from '@fluxer/api/src/user/tests/UserTestUtils';
import {PublicUserFlags, UserFlags} from '@fluxer/constants/src/UserConstants';
import {afterAll, beforeAll, beforeEach, describe, expect, test} from 'vitest';
/**
 * Overwrites a user's raw flag bitfield via the test-only endpoint.
 * Flags are serialized as a decimal string because bigint is not JSON-safe.
 */
async function setUserFlags(harness: ApiTestHarness, userId: string, flags: bigint): Promise<void> {
  const request = createBuilder(harness, '')
    .patch(`/test/users/${userId}/flags`)
    .body({flags: `${flags}`})
    .expect(HTTP_STATUS.OK);
  await request.execute();
}
// Verifies that the public flag bitfield in user responses is derived
// consistently across endpoints: STAFF is exposed unless STAFF_HIDDEN is
// set, is_staff remains truthful either way, and internal-only flags
// (e.g. HIGH_GLOBAL_RATE_LIMIT) never leak into responses.
describe('User flags in responses', () => {
let harness: ApiTestHarness;
beforeAll(async () => {
harness = await createApiTestHarness();
});
afterAll(async () => {
await harness?.shutdown();
});
// Shared harness across tests; reset state between each.
beforeEach(async () => {
await harness.reset();
});
test('GET /users/@me preserves staff flag when STAFF_HIDDEN is not set', async () => {
const account = await createTestAccount(harness);
await setUserFlags(harness, account.userId, UserFlags.STAFF);
const {json} = await fetchUserMe(harness, account.token);
expect(json.flags & PublicUserFlags.STAFF).toBe(PublicUserFlags.STAFF);
expect(json.is_staff).toBe(true);
});
// STAFF_HIDDEN removes STAFF from the public bitfield but is_staff stays true.
test('GET /users/@me hides staff flag when STAFF_HIDDEN is set', async () => {
const account = await createTestAccount(harness);
await setUserFlags(harness, account.userId, UserFlags.STAFF | UserFlags.STAFF_HIDDEN);
const {json} = await fetchUserMe(harness, account.token);
expect(json.flags & PublicUserFlags.STAFF).toBe(0);
expect(json.is_staff).toBe(true);
});
// Regression: the PATCH response must map flags the same way as GET.
test('PATCH /users/@me preserves staff flag after profile update', async () => {
const account = await createTestAccount(harness);
await setUserFlags(harness, account.userId, UserFlags.STAFF);
const {json} = await updateUserProfile(harness, account.token, {
bio: 'updated bio',
});
expect(json.flags & PublicUserFlags.STAFF).toBe(PublicUserFlags.STAFF);
expect(json.is_staff).toBe(true);
});
test('PATCH /users/@me preserves staff flag with STAFF_HIDDEN after profile update', async () => {
const account = await createTestAccount(harness);
await setUserFlags(harness, account.userId, UserFlags.STAFF | UserFlags.STAFF_HIDDEN);
const {json} = await updateUserProfile(harness, account.token, {
bio: 'updated bio',
});
expect(json.flags & PublicUserFlags.STAFF).toBe(0);
expect(json.is_staff).toBe(true);
});
// Partial (other-user) responses expose the same public bitfield rules.
test('GET /users/:id returns staff flag in partial response', async () => {
const account = await createTestAccount(harness);
const viewer = await createTestAccount(harness);
await setUserFlags(harness, account.userId, UserFlags.STAFF);
const {json} = await fetchUser(harness, account.userId, viewer.token);
expect(json.flags & PublicUserFlags.STAFF).toBe(PublicUserFlags.STAFF);
});
test('GET /users/:id hides staff flag when STAFF_HIDDEN is set', async () => {
const account = await createTestAccount(harness);
const viewer = await createTestAccount(harness);
await setUserFlags(harness, account.userId, UserFlags.STAFF | UserFlags.STAFF_HIDDEN);
const {json} = await fetchUser(harness, account.userId, viewer.token);
expect(json.flags & PublicUserFlags.STAFF).toBe(0);
});
test('non-staff user has flags 0', async () => {
const account = await createTestAccount(harness);
const {json} = await fetchUserMe(harness, account.token);
expect(json.flags).toBe(0);
expect(json.is_staff).toBe(false);
});
// Internal flags must be stripped from both GET and PATCH responses.
test('PATCH /users/@me does not leak internal flags', async () => {
const account = await createTestAccount(harness);
await setUserFlags(harness, account.userId, UserFlags.STAFF | UserFlags.HIGH_GLOBAL_RATE_LIMIT);
const {json: me} = await fetchUserMe(harness, account.token);
expect(me.flags & PublicUserFlags.STAFF).toBe(PublicUserFlags.STAFF);
expect(me.flags & Number(UserFlags.HIGH_GLOBAL_RATE_LIMIT)).toBe(0);
const updated = await updateUserProfile(harness, account.token, {
bio: 'checking internal flags',
});
expect(updated.json.flags & PublicUserFlags.STAFF).toBe(PublicUserFlags.STAFF);
expect(updated.json.flags & Number(UserFlags.HIGH_GLOBAL_RATE_LIMIT)).toBe(0);
});
});

View File

@@ -19,37 +19,15 @@
import dns from 'node:dns';
import {Config} from '@fluxer/api/src/Config';
import {Logger} from '@fluxer/api/src/Logger';
import type {ICacheService} from '@fluxer/cache/src/ICacheService';
import {getRegionDisplayName} from '@fluxer/geo_utils/src/RegionFormatting';
import type {GeoipResult} from '@fluxer/geoip/src/GeoipLookup';
import {formatGeoipLocation, lookupGeoipByIp} from '@fluxer/geoip/src/GeoipLookup';
import {extractClientIp} from '@fluxer/ip_utils/src/ClientIp';
import {isValidIp, normalizeIpString} from '@fluxer/ip_utils/src/IpAddress';
import {ms, seconds} from 'itty-time';
import maxmind, {type CityResponse, type Reader} from 'maxmind';
import {seconds} from 'itty-time';
const REVERSE_DNS_CACHE_TTL_SECONDS = seconds('1 day');
const REVERSE_DNS_CACHE_PREFIX = 'reverse-dns:';
export const UNKNOWN_LOCATION = 'Unknown Location';
export interface GeoipResult {
countryCode: string | null;
normalizedIp: string | null;
city: string | null;
region: string | null;
countryName: string | null;
}
type CacheEntry = {
result: GeoipResult;
expiresAt: number;
};
const geoipCache = new Map<string, CacheEntry>();
let maxmindReader: Reader<CityResponse> | null = null;
let maxmindReaderPromise: Promise<Reader<CityResponse>> | null = null;
export async function lookupGeoip(req: Request): Promise<GeoipResult>;
export async function lookupGeoip(ip: string): Promise<GeoipResult>;
export async function lookupGeoip(input: string | Request): Promise<GeoipResult> {
@@ -58,113 +36,9 @@ export async function lookupGeoip(input: string | Request): Promise<GeoipResult>
? input
: extractClientIp(input, {trustCfConnectingIp: Config.proxy.trust_cf_connecting_ip});
if (!ip) {
return buildFallbackResult('');
return {countryCode: null, normalizedIp: null, city: null, region: null, countryName: null};
}
return lookupGeoipFromString(ip);
}
/**
 * Produces an empty geoip result that carries only the normalized IP
 * (null when the input string is empty).
 */
function buildFallbackResult(clean: string): GeoipResult {
  const normalizedIp = clean === '' ? null : clean;
  return {countryCode: null, normalizedIp, city: null, region: null, countryName: null};
}
// Lazily opens the MaxMind City database exactly once per process.
// The opened reader and the in-flight open() promise are cached at module
// level so concurrent callers share a single open; a failed open clears the
// promise so the next caller can retry instead of reusing the rejection.
async function ensureMaxmindReader(): Promise<Reader<CityResponse>> {
if (maxmindReader) return maxmindReader;
if (!maxmindReaderPromise) {
const dbPath = Config.geoip.maxmindDbPath;
if (!dbPath) {
throw new Error('Missing MaxMind DB path');
}
maxmindReaderPromise = maxmind
.open<CityResponse>(dbPath)
.then((reader) => {
maxmindReader = reader;
return reader;
})
.catch((error) => {
// Reset so a transient failure does not permanently poison the singleton.
maxmindReaderPromise = null;
throw error;
});
}
return maxmindReaderPromise;
}
/**
 * Returns a display label for the first subdivision (state/region) of a
 * MaxMind record: the English name when present, otherwise the ISO code,
 * otherwise null.
 */
function stateLabel(record?: CityResponse): string | null {
  const firstSubdivision = record?.subdivisions?.[0];
  if (!firstSubdivision) {
    return null;
  }
  const englishName = firstSubdivision.names?.en;
  if (englishName) {
    return englishName;
  }
  return firstSubdivision.iso_code || null;
}
// Resolves an already-normalized IP against the MaxMind City database.
// Never throws: missing configuration, a missing record, or a reader error
// all degrade to a fallback result with only the normalized IP populated.
async function lookupMaxmind(clean: string): Promise<GeoipResult> {
const dbPath = Config.geoip.maxmindDbPath;
if (!dbPath) {
return buildFallbackResult(clean);
}
try {
const reader = await ensureMaxmindReader();
const record = reader.get(clean);
if (!record) return buildFallbackResult(clean);
const isoCode = record.country?.iso_code;
const countryCode = isoCode ? isoCode.toUpperCase() : null;
return {
countryCode,
normalizedIp: clean,
city: record.city?.names?.en ?? null,
region: stateLabel(record),
// Prefer MaxMind's English country name; otherwise derive a display
// name from the ISO code.
countryName: record.country?.names?.en ?? (countryCode ? countryDisplayName(countryCode) : null) ?? null,
};
} catch (error) {
const message = (error as Error).message ?? 'unknown';
Logger.warn({error, maxmind_db_path: dbPath, message}, 'MaxMind lookup failed');
return buildFallbackResult(clean);
}
}
// Looks up an IP with a 10-minute in-memory cache in front of the MaxMind
// lookup. Entries are lazily expired on read; the map is never proactively
// pruned, so stale keys linger until re-queried.
async function resolveGeoip(clean: string): Promise<GeoipResult> {
const now = Date.now();
const cached = geoipCache.get(clean);
if (cached && now < cached.expiresAt) {
return cached.result;
}
const result = await lookupMaxmind(clean);
geoipCache.set(clean, {result, expiresAt: now + ms('10 minutes')});
return result;
}
/**
 * Normalizes a raw IP string and resolves it; invalid IPs short-circuit to
 * an empty fallback result instead of hitting the MaxMind reader.
 */
async function lookupGeoipFromString(value: string): Promise<GeoipResult> {
  const normalized = normalizeIpString(value);
  return isValidIp(normalized) ? resolveGeoip(normalized) : buildFallbackResult(normalized);
}
/**
 * Maps an ISO 3166-1 alpha-2 code to a localized country name, or null
 * when the code is not exactly two ASCII uppercase letters (after
 * uppercasing) or no display name is available.
 */
function countryDisplayName(code: string, locale = 'en'): string | null {
  const normalized = code.toUpperCase();
  if (isAsciiUpperAlpha2(normalized)) {
    return getRegionDisplayName(normalized, {locale}) ?? null;
  }
  return null;
}
export function formatGeoipLocation(result: GeoipResult): string | null {
const parts: Array<string> = [];
if (result.city) parts.push(result.city);
if (result.region) parts.push(result.region);
const countryLabel = result.countryName ?? result.countryCode;
if (countryLabel) parts.push(countryLabel);
return parts.length > 0 ? parts.join(', ') : null;
return lookupGeoipByIp(ip, Config.geoip.maxmindDbPath);
}
export async function getIpAddressReverse(ip: string, cacheService?: ICacheService): Promise<string | null> {
@@ -190,16 +64,6 @@ export async function getIpAddressReverse(ip: string, cacheService?: ICacheServi
}
export async function getLocationLabelFromIp(ip: string): Promise<string | null> {
const result = await lookupGeoip(ip);
const result = await lookupGeoipByIp(ip, Config.geoip.maxmindDbPath);
return formatGeoipLocation(result);
}
/** True iff the string is exactly two ASCII uppercase letters (A–Z). */
function isAsciiUpperAlpha2(value: string): boolean {
  if (value.length !== 2) {
    return false;
  }
  for (let i = 0; i < 2; i++) {
    const code = value.charCodeAt(i);
    if (code < 65 || code > 90) {
      return false;
    }
  }
  return true;
}

View File

@@ -1,250 +0,0 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import type {KeyObject} from 'node:crypto';
import crypto from 'node:crypto';
import type {UserID} from '@fluxer/api/src/BrandedTypes';
import type {GatewayDispatchEvent} from '@fluxer/api/src/constants/Gateway';
import type {UserRow} from '@fluxer/api/src/database/types/UserTypes';
import {getMetricsService} from '@fluxer/api/src/infrastructure/MetricsService';
import {Logger} from '@fluxer/api/src/Logger';
import type {User} from '@fluxer/api/src/models/User';
import {mapUserToPrivateResponse} from '@fluxer/api/src/user/UserMappers';
import {SuspiciousActivityFlags} from '@fluxer/constants/src/UserConstants';
/** Minimal user-repository surface the SendGrid webhook handler depends on. */
export interface ISendGridUserRepository {
  findByEmail(email: string): Promise<User | null>;
  patchUpsert(userId: UserID, patch: Partial<UserRow>, currentRow: UserRow): Promise<User | null>;
}

/** Gateway surface used to broadcast USER_UPDATE after a bounce is recorded. */
export interface ISendGridGatewayService {
  dispatchPresence(params: {userId: UserID; event: GatewayDispatchEvent; data: unknown}): Promise<void>;
}

/** Known SendGrid event names; open-ended (`| string`) so unknown events still parse. */
type SendGridEventType =
  | 'bounce'
  | 'dropped'
  | 'delivered'
  | 'processed'
  | 'deferred'
  | 'open'
  | 'click'
  | 'spamreport'
  | 'unsubscribe'
  | 'group_unsubscribe'
  | 'group_resubscribe'
  | string;

/** Shape of a single SendGrid event webhook entry (fields beyond `email`/`event` are optional). */
export interface SendGridEvent {
  email: string;
  event: SendGridEventType;
  timestamp?: number;
  sg_event_id?: string;
  sg_message_id?: string;
  reason?: string;
  status?: string;
  // For 'bounce' events: 'blocked' = soft bounce, 'bounce' = hard bounce.
  type?: 'bounce' | 'blocked' | string;
  bounce_classification?: string;
}
/**
 * Handles inbound SendGrid event webhooks: verifies the webhook signature,
 * parses the event batch, and marks users whose addresses hard-bounced as
 * bounced/unverified so they must re-verify their email.
 */
export class SendGridWebhookService {
  constructor(
    private readonly userRepository: ISendGridUserRepository,
    private readonly gatewayService: ISendGridGatewayService,
  ) {}

  /**
   * Verifies the webhook signature over `timestamp + payload` using SHA-256
   * and the configured public key.
   *
   * @param payload Raw request body exactly as received.
   * @param signature Base64 (or base64url) encoded signature header value.
   * @param timestamp Timestamp header value, prepended to the payload before verification.
   * @param publicKey PEM public key, or a bare base64/base64url DER (SPKI) key.
   * @returns true only when the signature verifies; false on mismatch or any decode error.
   */
  verifySignature(payload: string, signature: string, timestamp: string, publicKey: string): boolean {
    try {
      const keyObject = this.resolvePublicKey(publicKey);
      const signatureBytes = decodeSignature(signature);
      // SendGrid signs the concatenation of the timestamp header and the raw body.
      const signedPayload = timestamp + payload;
      const verifier = crypto.createVerify('sha256');
      verifier.update(signedPayload);
      verifier.end();
      return verifier.verify(keyObject, signatureBytes);
    } catch (error) {
      Logger.error({error}, 'Error verifying SendGrid webhook signature');
      return false;
    }
  }

  /**
   * Entry point for the webhook route: enforces signature verification when a
   * secret is configured, parses the JSON body, and processes each event.
   *
   * @returns An HTTP-like {status, body} result for the controller to send.
   */
  async handleWebhook(params: {
    body: string;
    signature?: string;
    timestamp?: string;
    secret?: string | null;
  }): Promise<{status: number; body: string | null}> {
    const {body, signature, timestamp, secret} = params;
    // Verification is only enforced when a secret is configured.
    if (secret) {
      if (!signature || !timestamp) {
        getMetricsService().counter({name: 'fluxer.sendgrid.webhooks.rejected', value: 1});
        Logger.warn('SendGrid webhook missing signature headers');
        return {status: 401, body: 'Missing signature headers'};
      }
      const isValid = this.verifySignature(body, signature, timestamp, secret);
      if (!isValid) {
        getMetricsService().counter({name: 'fluxer.sendgrid.webhooks.rejected', value: 1});
        Logger.warn('SendGrid webhook signature verification failed');
        return {status: 401, body: 'Invalid signature'};
      }
    }
    let events: Array<SendGridEvent>;
    try {
      // SendGrid normally posts an array of events; tolerate a single object too.
      const parsed = JSON.parse(body) as SendGridEvent | Array<SendGridEvent>;
      events = Array.isArray(parsed) ? parsed : [parsed];
    } catch (parseError) {
      getMetricsService().counter({name: 'fluxer.sendgrid.webhooks.invalid_json', value: 1});
      Logger.error({parseError, body: body.slice(0, 1000)}, 'Failed to parse SendGrid webhook JSON body');
      return {status: 400, body: 'Invalid JSON'};
    }
    await this.processEvents(events);
    getMetricsService().counter({
      name: 'fluxer.sendgrid.webhooks.processed',
      value: 1,
      dimensions: {
        event_count: events.length.toString(),
      },
    });
    return {status: 200, body: null};
  }

  /** Processes events sequentially; a failure on one event does not abort the rest. */
  async processEvents(events: Array<SendGridEvent>): Promise<void> {
    for (const event of events) {
      try {
        await this.processEvent(event);
      } catch (error) {
        Logger.error({error, event}, 'Error processing SendGrid webhook event');
      }
    }
  }

  /**
   * Routes a single event. Only 'bounce' and 'dropped' are acted on:
   * bounce/blocked = soft bounce (logged only), bounce/bounce = hard bounce,
   * and 'dropped' whose reason mentions 'bounced'/'invalid' = hard bounce.
   */
  private async processEvent(event: SendGridEvent): Promise<void> {
    if (event.event !== 'bounce' && event.event !== 'dropped') {
      Logger.debug({event: event.event, email: event.email}, 'SendGrid event received (ignored)');
      return;
    }
    if (event.event === 'bounce') {
      if (event.type === 'blocked') {
        Logger.info(
          {email: event.email, reason: event.reason, type: event.type},
          'SendGrid soft bounce (blocked) received',
        );
        return;
      }
      if (event.type === 'bounce') {
        await this.handleHardBounce(event);
        return;
      }
    }
    if (event.event === 'dropped') {
      const reason = event.reason?.toLowerCase() || '';
      if (reason.includes('bounced') || reason.includes('invalid')) {
        await this.handleHardBounce(event);
        return;
      }
    }
  }

  /**
   * Marks the user owning the bounced address as bounced + unverified, flags
   * the account as requiring email re-verification, then broadcasts a
   * USER_UPDATE so connected clients observe the change.
   */
  private async handleHardBounce(event: SendGridEvent): Promise<void> {
    Logger.warn(
      {
        email: event.email,
        event: event.event,
        reason: event.reason,
        bounce_classification: event.bounce_classification,
        sg_event_id: event.sg_event_id,
      },
      'Processing hard bounce - marking email as invalid',
    );
    const user = await this.userRepository.findByEmail(event.email);
    if (!user) {
      Logger.warn({email: event.email}, 'User not found for bounced email');
      return;
    }
    // Idempotency: repeated bounce events for the same user are no-ops.
    if (user.emailBounced) {
      Logger.debug({userId: user.id, email: event.email}, 'Email already marked as bounced');
      return;
    }
    const currentFlags = user.suspiciousActivityFlags || 0;
    const newFlags = currentFlags | SuspiciousActivityFlags.REQUIRE_REVERIFIED_EMAIL;
    const updatedUser = await this.userRepository.patchUpsert(
      user.id,
      {
        email_bounced: true,
        email_verified: false,
        suspicious_activity_flags: newFlags,
      },
      user.toRow(),
    );
    // NOTE(review): this is logged even when patchUpsert returns null (update
    // may not have applied) — confirm whether that is intended.
    Logger.info(
      {userId: user.id, email: event.email, reason: event.reason},
      'User email marked as bounced and requires reverification',
    );
    if (updatedUser) {
      await this.gatewayService.dispatchPresence({
        userId: updatedUser.id,
        event: 'USER_UPDATE',
        data: mapUserToPrivateResponse(updatedUser),
      });
    }
  }

  /**
   * Accepts the verification key either as a PEM block or as a bare
   * base64/base64url encoded DER (SPKI) key.
   */
  private resolvePublicKey(rawPublicKey: string): KeyObject {
    const trimmed = rawPublicKey.trim();
    if (trimmed.includes('BEGIN PUBLIC KEY')) {
      return crypto.createPublicKey(trimmed);
    }
    // Strip whitespace/newlines that configuration may have introduced.
    const normalisedBase64 = trimmed.replace(/\s+/g, '');
    const der = decodeBase64OrBase64Url(normalisedBase64);
    return crypto.createPublicKey({
      key: der,
      format: 'der',
      type: 'spki',
    });
  }
}
/** Decodes a signature header value (base64 or base64url) into raw bytes. */
function decodeSignature(signature: string): Buffer {
  const trimmed = signature.trim();
  return decodeBase64OrBase64Url(trimmed);
}
/**
 * Decodes a base64 or base64url string into a Buffer.
 *
 * Fix: `Buffer.from(value, 'base64')` never throws on malformed input, so the
 * previous try/catch base64url fallback was dead code. Instead, normalise the
 * base64url alphabet ('-' -> '+', '_' -> '/') up front so both encodings
 * decode correctly through the standard base64 path.
 */
function decodeBase64OrBase64Url(value: string): Buffer {
  const normalised = value.replace(/-/g, '+').replace(/_/g, '/');
  return Buffer.from(normalised, 'base64');
}

View File

@@ -0,0 +1,193 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import crypto from 'node:crypto';
import type {UserID} from '@fluxer/api/src/BrandedTypes';
import type {GatewayDispatchEvent} from '@fluxer/api/src/constants/Gateway';
import type {UserRow} from '@fluxer/api/src/database/types/UserTypes';
import {getMetricsService} from '@fluxer/api/src/infrastructure/MetricsService';
import {Logger} from '@fluxer/api/src/Logger';
import type {User} from '@fluxer/api/src/models/User';
import {mapUserToPrivateResponse} from '@fluxer/api/src/user/UserMappers';
import {SuspiciousActivityFlags} from '@fluxer/constants/src/UserConstants';
/** Minimal user-repository surface the Sweego webhook handler depends on. */
export interface ISweegoUserRepository {
  findByEmail(email: string): Promise<User | null>;
  patchUpsert(userId: UserID, patch: Partial<UserRow>, currentRow: UserRow): Promise<User | null>;
}

/** Gateway surface used to broadcast USER_UPDATE after a bounce is recorded. */
export interface ISweegoGatewayService {
  dispatchPresence(params: {userId: UserID; event: GatewayDispatchEvent; data: unknown}): Promise<void>;
}

/** Known Sweego event names; open-ended (`| string`) so unknown events still parse. */
type SweegoEventType =
  | 'email_sent'
  | 'delivered'
  | 'soft-bounce'
  | 'hard_bounce'
  | 'list_unsub'
  | 'complaint'
  | 'email_clicked'
  | 'email_opened'
  | string;

/** Shape of a single Sweego event webhook payload (one event per request). */
export interface SweegoEvent {
  event_type: SweegoEventType;
  // ISO-8601 timestamp string as sent by Sweego.
  timestamp: string;
  swg_uid?: string;
  event_id?: string;
  details?: string;
  channel?: string;
  transaction_id?: string;
  recipient: string;
  domain_from?: string;
  campaign_type?: string;
  campaign_id?: string;
}
/**
 * Handles inbound Sweego event webhooks: verifies the HMAC signature, parses
 * the event, and marks users whose addresses hard-bounced as
 * bounced/unverified so they must re-verify their email.
 */
export class SweegoWebhookService {
  constructor(
    private readonly userRepository: ISweegoUserRepository,
    private readonly gatewayService: ISweegoGatewayService,
  ) {}

  /**
   * Verifies the webhook HMAC-SHA256 signature over `webhookId.timestamp.body`
   * (Standard Webhooks-style content), using the base64-decoded secret as key
   * and a constant-time comparison.
   *
   * NOTE(review): assumes the signature header carries the bare base64 MAC;
   * Standard Webhooks senders may prefix `v1,` and send several
   * space-separated signatures — confirm against Sweego's actual header format.
   */
  verifySignature(body: string, webhookId: string, timestamp: string, signature: string, secret: string): boolean {
    try {
      const secretBytes = Buffer.from(secret, 'base64');
      const contentToSign = `${webhookId}.${timestamp}.${body}`;
      const digest = crypto.createHmac('sha256', secretBytes).update(contentToSign).digest();
      const computedSignature = digest.toString('base64');
      const computedBuffer = Buffer.from(computedSignature);
      const receivedBuffer = Buffer.from(signature);
      // timingSafeEqual requires equal lengths; a length mismatch is a definite failure.
      if (computedBuffer.length !== receivedBuffer.length) {
        return false;
      }
      return crypto.timingSafeEqual(computedBuffer, receivedBuffer);
    } catch (error) {
      Logger.error({error}, 'Error verifying Sweego webhook signature');
      return false;
    }
  }

  /**
   * Entry point for the webhook route: enforces signature verification when a
   * secret is configured, parses the JSON body, and processes the event.
   *
   * @returns An HTTP-like {status, body} result for the controller to send.
   */
  async handleWebhook(params: {
    body: string;
    webhookId?: string;
    timestamp?: string;
    signature?: string;
    secret?: string | null;
  }): Promise<{status: number; body: string | null}> {
    const {body, webhookId, timestamp, signature, secret} = params;
    // Verification is only enforced when a secret is configured.
    if (secret) {
      if (!webhookId || !timestamp || !signature) {
        getMetricsService().counter({name: 'fluxer.sweego.webhooks.rejected', value: 1});
        Logger.warn('Sweego webhook missing signature headers');
        return {status: 401, body: 'Missing signature headers'};
      }
      const isValid = this.verifySignature(body, webhookId, timestamp, signature, secret);
      if (!isValid) {
        getMetricsService().counter({name: 'fluxer.sweego.webhooks.rejected', value: 1});
        Logger.warn('Sweego webhook signature verification failed');
        return {status: 401, body: 'Invalid signature'};
      }
    }
    let event: SweegoEvent;
    try {
      // Sweego posts a single event object per request.
      event = JSON.parse(body) as SweegoEvent;
    } catch (parseError) {
      getMetricsService().counter({name: 'fluxer.sweego.webhooks.invalid_json', value: 1});
      Logger.error({parseError, body: body.slice(0, 1000)}, 'Failed to parse Sweego webhook JSON body');
      return {status: 400, body: 'Invalid JSON'};
    }
    await this.processEvent(event);
    getMetricsService().counter({name: 'fluxer.sweego.webhooks.processed', value: 1});
    return {status: 200, body: null};
  }

  /**
   * Routes a single event: hard bounces trigger user updates, soft bounces
   * are only logged, and everything else is ignored.
   */
  async processEvent(event: SweegoEvent): Promise<void> {
    if (event.event_type !== 'soft-bounce' && event.event_type !== 'hard_bounce') {
      Logger.debug({eventType: event.event_type, recipient: event.recipient}, 'Sweego event received (ignored)');
      return;
    }
    if (event.event_type === 'hard_bounce') {
      await this.handleHardBounce(event);
      return;
    }
    Logger.info(
      {recipient: event.recipient, details: event.details, eventType: event.event_type},
      'Sweego soft bounce received',
    );
  }

  /**
   * Marks the user owning the bounced address as bounced + unverified, flags
   * the account as requiring email re-verification, then broadcasts a
   * USER_UPDATE so connected clients observe the change.
   */
  private async handleHardBounce(event: SweegoEvent): Promise<void> {
    Logger.warn(
      {
        recipient: event.recipient,
        eventType: event.event_type,
        details: event.details,
        eventId: event.event_id,
      },
      'Processing hard bounce - marking email as invalid',
    );
    const user = await this.userRepository.findByEmail(event.recipient);
    if (!user) {
      Logger.warn({recipient: event.recipient}, 'User not found for bounced email');
      return;
    }
    // Idempotency: repeated bounce events for the same user are no-ops.
    if (user.emailBounced) {
      Logger.debug({userId: user.id, recipient: event.recipient}, 'Email already marked as bounced');
      return;
    }
    const currentFlags = user.suspiciousActivityFlags || 0;
    const newFlags = currentFlags | SuspiciousActivityFlags.REQUIRE_REVERIFIED_EMAIL;
    const updatedUser = await this.userRepository.patchUpsert(
      user.id,
      {
        email_bounced: true,
        email_verified: false,
        suspicious_activity_flags: newFlags,
      },
      user.toRow(),
    );
    Logger.info(
      {userId: user.id, recipient: event.recipient, details: event.details},
      'User email marked as bounced and requires reverification',
    );
    if (updatedUser) {
      await this.gatewayService.dispatchPresence({
        userId: updatedUser.id,
        event: 'USER_UPDATE',
        data: mapUserToPrivateResponse(updatedUser),
      });
    }
  }
}

View File

@@ -476,11 +476,12 @@ export function WebhookController(app: HonoApp) {
return response;
});
app.post('/webhooks/sendgrid', async (ctx) => {
const response = await ctx.get('webhookRequestService').handleSendGridWebhook({
app.post('/webhooks/sweego', async (ctx) => {
const response = await ctx.get('webhookRequestService').handleSweegoWebhook({
body: await ctx.req.text(),
signature: ctx.req.header('X-Twilio-Email-Event-Webhook-Signature') ?? undefined,
timestamp: ctx.req.header('X-Twilio-Email-Event-Webhook-Timestamp') ?? undefined,
webhookId: ctx.req.header('webhook-id') ?? undefined,
timestamp: ctx.req.header('webhook-timestamp') ?? undefined,
signature: ctx.req.header('webhook-signature') ?? undefined,
});
return response;
});

View File

@@ -154,15 +154,13 @@ export class WebhookRepository extends IWebhookRepository {
};
const result = await executeVersionedUpdate<WebhookRow, 'webhook_id' | 'webhook_token'>(
async () => {
if (oldData !== undefined) return oldData;
return await fetchOne<WebhookRow>(FETCH_WEBHOOK_BY_ID_CQL, {webhook_id: webhookId});
},
async () => fetchOne<WebhookRow>(FETCH_WEBHOOK_BY_ID_CQL, {webhook_id: webhookId}),
(current) => ({
pk: {webhook_id: webhookId, webhook_token: updatedData.webhook_token},
patch: buildPatchFromData(updatedData, current, WEBHOOK_COLUMNS, ['webhook_id', 'webhook_token']),
}),
Webhooks,
{initialData: oldData},
);
const batch = new BatchBuilder();

View File

@@ -27,7 +27,7 @@ import type {IMediaService} from '@fluxer/api/src/infrastructure/IMediaService';
import type {LiveKitWebhookService} from '@fluxer/api/src/infrastructure/LiveKitWebhookService';
import type {UserCacheService} from '@fluxer/api/src/infrastructure/UserCacheService';
import type {RequestCache} from '@fluxer/api/src/middleware/RequestCacheMiddleware';
import type {SendGridWebhookService} from '@fluxer/api/src/webhook/SendGridWebhookService';
import type {SweegoWebhookService} from '@fluxer/api/src/webhook/SweegoWebhookService';
import {transformSlackWebhookRequest} from '@fluxer/api/src/webhook/transformers/SlackTransformer';
import {
mapWebhooksToResponse,
@@ -149,10 +149,11 @@ interface LiveKitWebhookParams {
authHeader?: string;
}
interface SendGridWebhookParams {
interface SweegoWebhookParams {
body: string;
signature?: string;
webhookId?: string;
timestamp?: string;
signature?: string;
}
export class WebhookRequestService {
@@ -164,7 +165,7 @@ export class WebhookRequestService {
private readonly userCacheService: UserCacheService,
private readonly mediaService: IMediaService,
private readonly liveKitWebhookService: LiveKitWebhookService | null,
private readonly sendGridWebhookService: SendGridWebhookService,
private readonly sweegoWebhookService: SweegoWebhookService,
) {}
async listGuildWebhooks(params: WebhookListGuildParams): Promise<Array<WebhookResponse>> {
@@ -339,15 +340,16 @@ export class WebhookRequestService {
return new Response(response.body, {status: response.status});
}
async handleSendGridWebhook(params: SendGridWebhookParams): Promise<Response> {
async handleSweegoWebhook(params: SweegoWebhookParams): Promise<Response> {
if (!Config.email.enabled) {
return new Response('Email not enabled', {status: 404});
}
const response = await this.sendGridWebhookService.handleWebhook({
const response = await this.sweegoWebhookService.handleWebhook({
body: params.body,
signature: params.signature,
webhookId: params.webhookId,
timestamp: params.timestamp,
signature: params.signature,
secret: Config.email.webhookSecret,
});
return new Response(response.body, {status: response.status});

View File

@@ -1,194 +0,0 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import crypto from 'node:crypto';
import {
type ISendGridGatewayService,
type ISendGridUserRepository,
type SendGridEvent,
SendGridWebhookService,
} from '@fluxer/api/src/webhook/SendGridWebhookService';
import {SuspiciousActivityFlags} from '@fluxer/constants/src/UserConstants';
import {describe, expect, it, vi} from 'vitest';
/** Builds a user-repository stub whose lookups and upserts resolve to null by default. */
function createMockUserRepository(): ISendGridUserRepository {
  const repository: ISendGridUserRepository = {
    findByEmail: vi.fn().mockResolvedValue(null),
    patchUpsert: vi.fn().mockResolvedValue(null),
  };
  return repository;
}
/** Builds a gateway stub that records presence dispatches without doing anything. */
function createMockGatewayService(): ISendGridGatewayService {
  const gateway: ISendGridGatewayService = {
    dispatchPresence: vi.fn().mockResolvedValue(undefined),
  };
  return gateway;
}
/** Produces a base64 signature over `timestamp + payload`, mirroring SendGrid's scheme. */
function signPayload(privateKey: string, payload: string, timestamp: string): string {
  const message = Buffer.from(timestamp + payload);
  return crypto.sign('sha256', message, privateKey).toString('base64');
}
// Unit tests for SendGridWebhookService: signature verification (PEM and bare
// DER keys) and bounce-event processing (ignore, mark, idempotent skip).
describe('SendGridWebhookService', () => {
  describe('verifySignature', () => {
    it('returns true for valid signature with PEM key', () => {
      const userRepo = createMockUserRepository();
      const gateway = createMockGatewayService();
      const service = new SendGridWebhookService(userRepo, gateway);
      const {privateKey, publicKey} = crypto.generateKeyPairSync('ec', {
        namedCurve: 'prime256v1',
        privateKeyEncoding: {format: 'pem', type: 'pkcs8'},
        publicKeyEncoding: {format: 'pem', type: 'spki'},
      });
      const payload = JSON.stringify({event: 'bounce', email: 'test@example.com'});
      const timestamp = '1700000000';
      const signature = signPayload(privateKey, payload, timestamp);
      const result = service.verifySignature(payload, signature, timestamp, publicKey);
      expect(result).toBe(true);
    });
    it('returns true for valid signature with base64 DER key', () => {
      const userRepo = createMockUserRepository();
      const gateway = createMockGatewayService();
      const service = new SendGridWebhookService(userRepo, gateway);
      const {privateKey, publicKey} = crypto.generateKeyPairSync('ec', {
        namedCurve: 'prime256v1',
        privateKeyEncoding: {format: 'pem', type: 'pkcs8'},
        publicKeyEncoding: {format: 'der', type: 'spki'},
      });
      const payload = JSON.stringify({event: 'bounce', email: 'test@example.com'});
      const timestamp = '1700000000';
      const signature = signPayload(privateKey, payload, timestamp);
      // DER-encoded key is supplied as a bare base64 string, not a PEM block.
      const publicKeyBase64 = publicKey.toString('base64');
      const result = service.verifySignature(payload, signature, timestamp, publicKeyBase64);
      expect(result).toBe(true);
    });
    it('returns false for invalid signature', () => {
      const userRepo = createMockUserRepository();
      const gateway = createMockGatewayService();
      const service = new SendGridWebhookService(userRepo, gateway);
      const {publicKey} = crypto.generateKeyPairSync('ec', {
        namedCurve: 'prime256v1',
        publicKeyEncoding: {format: 'pem', type: 'spki'},
        privateKeyEncoding: {format: 'pem', type: 'pkcs8'},
      });
      const payload = JSON.stringify({event: 'bounce', email: 'test@example.com'});
      const timestamp = '1700000000';
      const signature = 'invalid-signature';
      const result = service.verifySignature(payload, signature, timestamp, publicKey);
      expect(result).toBe(false);
    });
  });
  describe('processEvents', () => {
    it('ignores non-bounce events', async () => {
      const userRepo = createMockUserRepository();
      const gateway = createMockGatewayService();
      const service = new SendGridWebhookService(userRepo, gateway);
      const events: Array<SendGridEvent> = [{event: 'delivered', email: 'soft@example.com'}];
      await service.processEvents(events);
      expect(userRepo.findByEmail).not.toHaveBeenCalled();
      expect(userRepo.patchUpsert).not.toHaveBeenCalled();
    });
    it('marks hard bounces as unverified', async () => {
      const userRepo = createMockUserRepository();
      const gateway = createMockGatewayService();
      const service = new SendGridWebhookService(userRepo, gateway);
      const mockUser = {
        id: BigInt(123),
        email: 'bounced@example.com',
        emailBounced: false,
        emailVerified: true,
        suspiciousActivityFlags: 0,
        toRow: () => ({}),
      };
      (userRepo.findByEmail as ReturnType<typeof vi.fn>).mockResolvedValue(mockUser);
      (userRepo.patchUpsert as ReturnType<typeof vi.fn>).mockResolvedValue({
        ...mockUser,
        emailBounced: true,
        emailVerified: false,
      });
      const events: Array<SendGridEvent> = [
        {
          event: 'bounce',
          type: 'bounce',
          email: 'bounced@example.com',
          sg_event_id: 'event-1',
        },
      ];
      await service.processEvents(events);
      expect(userRepo.findByEmail).toHaveBeenCalledWith('bounced@example.com');
      expect(userRepo.patchUpsert).toHaveBeenCalledWith(
        mockUser.id,
        {
          email_bounced: true,
          email_verified: false,
          suspicious_activity_flags: SuspiciousActivityFlags.REQUIRE_REVERIFIED_EMAIL,
        },
        mockUser.toRow(),
      );
    });
    it('skips already bounced users', async () => {
      const userRepo = createMockUserRepository();
      const gateway = createMockGatewayService();
      const service = new SendGridWebhookService(userRepo, gateway);
      const mockUser = {
        id: BigInt(456),
        email: 'already@example.com',
        emailBounced: true,
        suspiciousActivityFlags: 0,
        toRow: () => ({}),
      };
      (userRepo.findByEmail as ReturnType<typeof vi.fn>).mockResolvedValue(mockUser);
      const events: Array<SendGridEvent> = [
        {
          event: 'bounce',
          type: 'bounce',
          email: 'already@example.com',
        },
      ];
      await service.processEvents(events);
      expect(userRepo.patchUpsert).not.toHaveBeenCalled();
    });
  });
});

View File

@@ -0,0 +1,198 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import crypto from 'node:crypto';
import {initializeLogger} from '@fluxer/api/src/Logger';
import {NoopLogger} from '@fluxer/api/src/test/mocks/NoopLogger';
import {
type ISweegoGatewayService,
type ISweegoUserRepository,
type SweegoEvent,
SweegoWebhookService,
} from '@fluxer/api/src/webhook/SweegoWebhookService';
import {SuspiciousActivityFlags} from '@fluxer/constants/src/UserConstants';
import {beforeAll, describe, expect, it, vi} from 'vitest';
// Replace the real logger before the suite runs so tests stay silent.
beforeAll(() => initializeLogger(new NoopLogger()));
/** Builds a user-repository stub whose lookups and upserts resolve to null by default. */
function createMockUserRepository(): ISweegoUserRepository {
  const repository: ISweegoUserRepository = {
    findByEmail: vi.fn().mockResolvedValue(null),
    patchUpsert: vi.fn().mockResolvedValue(null),
  };
  return repository;
}
/** Builds a gateway stub that records presence dispatches without doing anything. */
function createMockGatewayService(): ISweegoGatewayService {
  const gateway: ISweegoGatewayService = {
    dispatchPresence: vi.fn().mockResolvedValue(undefined),
  };
  return gateway;
}
/** Computes the Sweego HMAC-SHA256 signature over `webhookId.timestamp.body`, base64 encoded. */
function signPayload(secret: string, webhookId: string, timestamp: string, body: string): string {
  const key = Buffer.from(secret, 'base64');
  const message = [webhookId, timestamp, body].join('.');
  const hmac = crypto.createHmac('sha256', key);
  hmac.update(message);
  return hmac.digest('base64');
}
// Unit tests for SweegoWebhookService: HMAC signature verification (valid,
// invalid, tampered body) and bounce-event processing (ignore, soft-bounce
// log-only, hard-bounce mark, idempotent skip).
describe('SweegoWebhookService', () => {
  const testSecret = crypto.randomBytes(32).toString('base64');
  describe('verifySignature', () => {
    it('returns true for valid HMAC-SHA256 signature', () => {
      const userRepo = createMockUserRepository();
      const gateway = createMockGatewayService();
      const service = new SweegoWebhookService(userRepo, gateway);
      const body = JSON.stringify({event_type: 'hard_bounce', recipient: 'test@example.com'});
      const webhookId = '237e3736c687425d9ea8665216bcfe8a';
      const timestamp = '1769696506';
      const signature = signPayload(testSecret, webhookId, timestamp, body);
      const result = service.verifySignature(body, webhookId, timestamp, signature, testSecret);
      expect(result).toBe(true);
    });
    it('returns false for invalid signature', () => {
      const userRepo = createMockUserRepository();
      const gateway = createMockGatewayService();
      const service = new SweegoWebhookService(userRepo, gateway);
      const body = JSON.stringify({event_type: 'hard_bounce', recipient: 'test@example.com'});
      const webhookId = '237e3736c687425d9ea8665216bcfe8a';
      const timestamp = '1769696506';
      const result = service.verifySignature(body, webhookId, timestamp, 'invalid-signature', testSecret);
      expect(result).toBe(false);
    });
    it('returns false for tampered body', () => {
      const userRepo = createMockUserRepository();
      const gateway = createMockGatewayService();
      const service = new SweegoWebhookService(userRepo, gateway);
      const originalBody = JSON.stringify({event_type: 'hard_bounce', recipient: 'test@example.com'});
      const webhookId = '237e3736c687425d9ea8665216bcfe8a';
      const timestamp = '1769696506';
      const signature = signPayload(testSecret, webhookId, timestamp, originalBody);
      // The signature was computed over the original body; any change must fail.
      const tamperedBody = JSON.stringify({event_type: 'hard_bounce', recipient: 'other@example.com'});
      const result = service.verifySignature(tamperedBody, webhookId, timestamp, signature, testSecret);
      expect(result).toBe(false);
    });
  });
  describe('processEvent', () => {
    it('ignores non-bounce events', async () => {
      const userRepo = createMockUserRepository();
      const gateway = createMockGatewayService();
      const service = new SweegoWebhookService(userRepo, gateway);
      const event: SweegoEvent = {
        event_type: 'delivered',
        timestamp: '2026-01-29T14:21:46.729251+00:00',
        recipient: 'soft@example.com',
      };
      await service.processEvent(event);
      expect(userRepo.findByEmail).not.toHaveBeenCalled();
      expect(userRepo.patchUpsert).not.toHaveBeenCalled();
    });
    it('logs soft bounces without marking as bounced', async () => {
      const userRepo = createMockUserRepository();
      const gateway = createMockGatewayService();
      const service = new SweegoWebhookService(userRepo, gateway);
      const event: SweegoEvent = {
        event_type: 'soft-bounce',
        timestamp: '2026-01-29T14:21:46.729251+00:00',
        recipient: 'soft@example.com',
        details: 'Temporary failure',
      };
      await service.processEvent(event);
      expect(userRepo.findByEmail).not.toHaveBeenCalled();
      expect(userRepo.patchUpsert).not.toHaveBeenCalled();
    });
    it('marks hard bounces as unverified', async () => {
      const userRepo = createMockUserRepository();
      const gateway = createMockGatewayService();
      const service = new SweegoWebhookService(userRepo, gateway);
      const mockUser = {
        id: BigInt(123),
        email: 'bounced@example.com',
        emailBounced: false,
        emailVerified: true,
        suspiciousActivityFlags: 0,
        toRow: () => ({}),
      };
      (userRepo.findByEmail as ReturnType<typeof vi.fn>).mockResolvedValue(mockUser);
      (userRepo.patchUpsert as ReturnType<typeof vi.fn>).mockResolvedValue(null);
      const event: SweegoEvent = {
        event_type: 'hard_bounce',
        timestamp: '2026-01-29T14:21:46.729251+00:00',
        recipient: 'bounced@example.com',
        event_id: 'event-1',
      };
      await service.processEvent(event);
      expect(userRepo.findByEmail).toHaveBeenCalledWith('bounced@example.com');
      expect(userRepo.patchUpsert).toHaveBeenCalledWith(
        mockUser.id,
        {
          email_bounced: true,
          email_verified: false,
          suspicious_activity_flags: SuspiciousActivityFlags.REQUIRE_REVERIFIED_EMAIL,
        },
        mockUser.toRow(),
      );
    });
    it('skips already bounced users', async () => {
      const userRepo = createMockUserRepository();
      const gateway = createMockGatewayService();
      const service = new SweegoWebhookService(userRepo, gateway);
      const mockUser = {
        id: BigInt(456),
        email: 'already@example.com',
        emailBounced: true,
        suspiciousActivityFlags: 0,
        toRow: () => ({}),
      };
      (userRepo.findByEmail as ReturnType<typeof vi.fn>).mockResolvedValue(mockUser);
      const event: SweegoEvent = {
        event_type: 'hard_bounce',
        timestamp: '2026-01-29T14:21:46.729251+00:00',
        recipient: 'already@example.com',
      };
      await service.processEvent(event);
      expect(userRepo.patchUpsert).not.toHaveBeenCalled();
    });
  });
});

View File

@@ -0,0 +1,162 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import type {JetStreamWorkerQueue} from '@fluxer/api/src/worker/JetStreamWorkerQueue';
import type {LoggerInterface} from '@fluxer/logger/src/LoggerInterface';
import type {WorkerJobPayload} from '@fluxer/worker/src/contracts/WorkerTypes';
/** A registered cron job: what to enqueue and when. */
interface CronDefinition {
  // Stable identifier used for upsert/replacement.
  id: string;
  // Worker task type to enqueue when the expression matches.
  taskType: string;
  // Payload forwarded verbatim to the worker queue.
  payload: WorkerJobPayload;
  // 6-field cron expression (second minute hour dom month dow).
  cronExpression: string;
  // Unix second of the last firing; guards against double-fires within a second.
  lastFired: number;
}
/**
 * Expands a single cron field into the list of matching values.
 *
 * Supports '*', comma lists, ranges ('a-b'), and steps ('*&#47;n', 'a-b/n').
 * A wildcard-only field returns [] which callers treat as "any value".
 *
 * Fix: a zero (or otherwise invalid) step such as "*&#47;0" previously made the
 * expansion loop never advance, hanging the scheduler tick forever; such
 * parts are now skipped (they contribute no matching values).
 *
 * @param field One whitespace-free cron field.
 * @param min Lowest valid value for this field (used for '*' in steps).
 * @param max Highest valid value for this field.
 */
function parseCronField(field: string, min: number, max: number): Array<number> {
  if (field === '*') {
    return [];
  }
  const values: Array<number> = [];
  for (const part of field.split(',')) {
    const stepMatch = part.match(/^(.+)\/(\d+)$/);
    if (stepMatch) {
      const [, range, stepStr] = stepMatch;
      const step = Number.parseInt(stepStr!, 10);
      // Guard against "*/0" and unparsable steps: a non-positive step would
      // loop forever below, so the part is treated as matching nothing.
      if (!Number.isFinite(step) || step < 1) {
        continue;
      }
      let start = min;
      let end = max;
      if (range !== '*') {
        const rangeParts = range!.split('-');
        start = Number.parseInt(rangeParts[0]!, 10);
        if (rangeParts.length > 1) {
          end = Number.parseInt(rangeParts[1]!, 10);
        }
      }
      for (let i = start; i <= end; i += step) {
        values.push(i);
      }
    } else if (part.includes('-')) {
      const [startStr, endStr] = part.split('-');
      const start = Number.parseInt(startStr!, 10);
      const end = Number.parseInt(endStr!, 10);
      for (let i = start; i <= end; i++) {
        values.push(i);
      }
    } else {
      values.push(Number.parseInt(part, 10));
    }
  }
  return values;
}
/**
 * Evaluates a 6-field cron expression (second minute hour day-of-month month
 * day-of-week) against a Date, using the process-local timezone. Malformed
 * expressions (wrong field count) never match.
 *
 * Generalization: day-of-week now accepts 0-7, with both 0 and 7 meaning
 * Sunday, matching standard crontab(5) behavior. Previously '7' in the
 * day-of-week field could never match.
 *
 * NOTE(review): unlike Vixie cron, day-of-month and day-of-week are ANDed
 * rather than ORed when both are restricted — confirm this is intended.
 */
function matchesCronExpression(expression: string, date: Date): boolean {
  const parts = expression.trim().split(/\s+/);
  if (parts.length !== 6) {
    return false;
  }
  const [secField, minField, hourField, domField, monField, dowField] = parts;
  const second = date.getSeconds();
  const minute = date.getMinutes();
  const hour = date.getHours();
  const dayOfMonth = date.getDate();
  const month = date.getMonth() + 1;
  const dayOfWeek = date.getDay();
  function matches(field: string, value: number, min: number, max: number): boolean {
    const allowed = parseCronField(field, min, max);
    // An empty list means the field was '*', i.e. any value matches.
    return allowed.length === 0 || allowed.includes(value);
  }
  // Sunday may be written as either 0 or 7 in the day-of-week field.
  const allowedDow = parseCronField(dowField!, 0, 7);
  const dowMatches =
    allowedDow.length === 0 || allowedDow.includes(dayOfWeek) || (dayOfWeek === 0 && allowedDow.includes(7));
  return (
    matches(secField!, second, 0, 59) &&
    matches(minField!, minute, 0, 59) &&
    matches(hourField!, hour, 0, 23) &&
    matches(domField!, dayOfMonth, 1, 31) &&
    matches(monField!, month, 1, 12) &&
    dowMatches
  );
}
/**
 * Lightweight in-process cron scheduler. Once per second it evaluates every
 * registered definition against the current wall-clock time and enqueues the
 * matching jobs onto the worker queue, deduplicating within a second via
 * each definition's `lastFired` marker.
 */
export class CronScheduler {
  private readonly queue: JetStreamWorkerQueue;
  private readonly logger: LoggerInterface;
  private readonly definitions = new Map<string, CronDefinition>();
  private intervalId: ReturnType<typeof setInterval> | null = null;

  constructor(queue: JetStreamWorkerQueue, logger: LoggerInterface) {
    this.queue = queue;
    this.logger = logger;
  }

  /** Registers or replaces the cron definition stored under `id`. */
  upsert(id: string, taskType: string, payload: WorkerJobPayload, cronExpression: string): void {
    const definition: CronDefinition = {id, taskType, payload, cronExpression, lastFired: 0};
    this.definitions.set(id, definition);
  }

  /** Starts the 1 Hz evaluation loop; calling it while running is a no-op. */
  start(): void {
    if (this.intervalId !== null) {
      return;
    }
    const onTick = (): void => {
      this.tick().catch((error) => {
        this.logger.error({err: error}, 'Cron scheduler tick failed');
      });
    };
    this.intervalId = setInterval(onTick, 1000);
    this.logger.info(`Cron scheduler started with ${this.definitions.size} definitions`);
  }

  /** Stops the loop; safe to call when not running. */
  stop(): void {
    if (this.intervalId === null) {
      return;
    }
    clearInterval(this.intervalId);
    this.intervalId = null;
  }

  /** Evaluates all definitions against the current second and enqueues matches. */
  private async tick(): Promise<void> {
    const now = new Date();
    const nowSeconds = Math.floor(now.getTime() / 1000);
    for (const definition of this.definitions.values()) {
      // Never fire the same definition twice within the same second.
      if (definition.lastFired === nowSeconds) {
        continue;
      }
      if (!matchesCronExpression(definition.cronExpression, now)) {
        continue;
      }
      definition.lastFired = nowSeconds;
      try {
        await this.queue.enqueue(definition.taskType, definition.payload);
        this.logger.debug({cronId: definition.id, taskType: definition.taskType}, 'Cron job fired');
      } catch (error) {
        this.logger.error(
          {err: error, cronId: definition.id, taskType: definition.taskType},
          'Failed to enqueue cron job',
        );
      }
    }
  }
}

Some files were not shown because too many files have changed in this diff Show More