refactor progress

This commit is contained in:
Hampus Kraft
2026-02-17 12:22:36 +00:00
parent cb31608523
commit d5abd1a7e4
8257 changed files with 1190207 additions and 761040 deletions

View File

@@ -0,0 +1,142 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import type {
CreateAdminApiKeyData,
IAdminApiKeyRepository,
} from '@fluxer/api/src/admin/repositories/IAdminApiKeyRepository';
import type {UserID} from '@fluxer/api/src/BrandedTypes';
import {BatchBuilder, Db, fetchMany, fetchOne, upsertOne} from '@fluxer/api/src/database/Cassandra';
import type {AdminApiKeyRow} from '@fluxer/api/src/database/types/AdminAuthTypes';
import {AdminApiKey} from '@fluxer/api/src/models/AdminApiKey';
import {AdminApiKeys, AdminApiKeysByCreator} from '@fluxer/api/src/Tables';
import {hashPassword} from '@fluxer/api/src/utils/PasswordUtils';
/** Whole seconds from now until `expiresAt`, clamped to a minimum of 1. */
function computeTtlSeconds(expiresAt: Date): number {
  const remainingMs = expiresAt.getTime() - Date.now();
  const remainingSeconds = Math.floor(remainingMs / 1000);
  // Already-expired (or sub-second) timestamps still yield a positive TTL.
  return remainingSeconds > 1 ? remainingSeconds : 1;
}
/**
 * Cassandra-backed implementation of IAdminApiKeyRepository.
 *
 * API keys are denormalized into two tables: AdminApiKeys (primary lookup by
 * key_id, including the key hash) and AdminApiKeysByCreator (per-creator
 * listing index, without the hash). Writes touch both tables in a single
 * batch so the two views stay in sync.
 */
export class AdminApiKeyRepository implements IAdminApiKeyRepository {
  /**
   * Creates and persists a new admin API key.
   *
   * The raw key material is hashed before storage and never written to the
   * database. When `data.expiresAt` is set, both rows are inserted with a
   * matching Cassandra TTL so they are removed automatically on expiry.
   */
  async create(data: CreateAdminApiKeyData, createdBy: UserID, keyId: bigint, rawKey: string): Promise<AdminApiKey> {
    const keyHash = await hashPassword(rawKey);
    const createdAt = new Date();
    const row: AdminApiKeyRow = {
      key_id: keyId,
      key_hash: keyHash,
      name: data.name,
      created_by_user_id: createdBy,
      created_at: createdAt,
      last_used_at: null,
      expires_at: data.expiresAt,
      version: 1,
      acls: data.acls,
    };
    const indexRow = AdminApiKeyRepository.toCreatorIndexRow(row);
    const batch = new BatchBuilder();
    if (data.expiresAt) {
      const ttlSeconds = computeTtlSeconds(data.expiresAt);
      batch.addPrepared(AdminApiKeys.insertWithTtl(row, ttlSeconds));
      batch.addPrepared(AdminApiKeysByCreator.insertWithTtl(indexRow, ttlSeconds));
    } else {
      batch.addPrepared(AdminApiKeys.upsertAll(row));
      batch.addPrepared(AdminApiKeysByCreator.upsertAll(indexRow));
    }
    await batch.execute();
    return new AdminApiKey(row);
  }

  /** Projects a primary-table row onto the creator-index columns (omits key_hash). */
  private static toCreatorIndexRow(row: AdminApiKeyRow) {
    return {
      created_by_user_id: row.created_by_user_id,
      key_id: row.key_id,
      created_at: row.created_at,
      name: row.name,
      expires_at: row.expires_at,
      last_used_at: row.last_used_at,
      version: row.version,
      acls: row.acls,
    };
  }

  /** Looks up a key by its ID; returns null when absent (or already TTL-expired). */
  async findById(keyId: bigint): Promise<AdminApiKey | null> {
    const query = AdminApiKeys.select({
      where: AdminApiKeys.where.eq('key_id'),
      limit: 1,
    });
    const row = await fetchOne<AdminApiKeyRow>(query.bind({key_id: keyId}));
    return row ? new AdminApiKey(row) : null;
  }

  /**
   * Lists all keys created by a user.
   *
   * Reads the creator index first, then hydrates each key from the primary
   * table (one query per key). Keys whose primary row no longer exists are
   * dropped from the result.
   */
  async listByCreator(createdBy: UserID): Promise<Array<AdminApiKey>> {
    const query = AdminApiKeysByCreator.select({
      where: AdminApiKeysByCreator.where.eq('created_by_user_id'),
    });
    const indexRows = await fetchMany<{
      created_by_user_id: UserID;
      key_id: bigint;
    }>(query.bind({created_by_user_id: createdBy}));
    if (indexRows.length === 0) {
      return [];
    }
    const apiKeys = await Promise.all(indexRows.map((row) => this.findById(row.key_id)));
    return apiKeys.filter((key): key is AdminApiKey => key !== null);
  }

  /**
   * Stamps the key's last_used_at with the current time.
   *
   * NOTE(review): only the primary table is patched; the creator-index copy
   * of last_used_at goes stale. Confirm the index column is unused or update
   * both views here.
   */
  async updateLastUsed(keyId: bigint): Promise<void> {
    const patchQuery = AdminApiKeys.patchByPk({key_id: keyId}, {last_used_at: Db.set(new Date())});
    await upsertOne(patchQuery);
  }

  /** Deletes the key from both tables in a single batch. */
  async revoke(keyId: bigint, createdBy: UserID): Promise<void> {
    const batch = new BatchBuilder();
    batch.addPrepared(AdminApiKeys.deleteByPk({key_id: keyId}));
    batch.addPrepared(AdminApiKeysByCreator.deleteByPk({created_by_user_id: createdBy, key_id: keyId}));
    await batch.execute();
  }
}

View File

@@ -0,0 +1,392 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import type {ArchiveSubjectType} from '@fluxer/api/src/admin/models/AdminArchiveModel';
import {AdminArchive} from '@fluxer/api/src/admin/models/AdminArchiveModel';
import {BatchBuilder, Db, fetchMany, fetchOne} from '@fluxer/api/src/database/Cassandra';
import type {AdminArchiveRow} from '@fluxer/api/src/database/types/AdminArchiveTypes';
import {Logger} from '@fluxer/api/src/Logger';
import {AdminArchivesByRequester, AdminArchivesBySubject, AdminArchivesByType} from '@fluxer/api/src/Tables';
import {ms} from 'itty-time';
// Default retention applied when an archive has no explicit expiry:
// ensureExpiry() below stamps expires_at with now + DEFAULT_RETENTION_MS.
const RETENTION_DAYS = 365;
const DEFAULT_RETENTION_MS = ms(`${RETENTION_DAYS} days`);
/** Remaining lifetime of `expiresAt` in whole seconds, never below 1. */
function computeTtlSeconds(expiresAt: Date): number {
  const remaining = (expiresAt.getTime() - Date.now()) / 1000;
  // Clamp so an expired or imminent timestamp still produces a positive TTL.
  return Math.max(Math.floor(remaining), 1);
}
/**
 * Drops rows whose expires_at lies in the past, unless the caller explicitly
 * asked to see expired records. Rows with no expiry are always kept.
 */
function filterExpired(rows: Array<AdminArchiveRow>, includeExpired: boolean): Array<AdminArchiveRow> {
  if (includeExpired) {
    return rows;
  }
  const cutoff = Date.now();
  return rows.filter((row) => row.expires_at == null || row.expires_at.getTime() > cutoff);
}
/**
 * Patch payload accepted by all three archive view tables. Intersecting the
 * tables' own parameter types keeps the shared helper honest if the table
 * schemas ever diverge.
 */
type ArchiveViewPatch = Parameters<typeof AdminArchivesBySubject.patchByPkWithTtlParam>[1] &
  Parameters<typeof AdminArchivesByRequester.patchByPkWithTtlParam>[1] &
  Parameters<typeof AdminArchivesByType.patchByPkWithTtlParam>[1];

/**
 * Repository for admin archive records.
 *
 * Archives are denormalized into three Cassandra query tables (by subject,
 * by requester, by subject type). Every mutation writes all three views in
 * one batch with a shared TTL so the views expire together.
 */
export class AdminArchiveRepository {
  /** Defaults expiresAt to now + DEFAULT_RETENTION_MS when the caller set none. */
  private ensureExpiry(archive: AdminArchive): AdminArchive {
    if (!archive.expiresAt) {
      archive.expiresAt = new Date(Date.now() + DEFAULT_RETENTION_MS);
    }
    return archive;
  }

  /** Inserts the archive's full row into all three views; returns the archive with expiry applied. */
  private async insertAllViews(archive: AdminArchive): Promise<AdminArchive> {
    const withExpiry = this.ensureExpiry(archive);
    const ttlSeconds = computeTtlSeconds(withExpiry.expiresAt!);
    const rowWithTtl = {...withExpiry.toRow(), ttl_seconds: ttlSeconds} as AdminArchiveRow;
    const batch = new BatchBuilder();
    batch.addPrepared(AdminArchivesBySubject.insertWithTtlParam(rowWithTtl, 'ttl_seconds'));
    batch.addPrepared(AdminArchivesByRequester.insertWithTtlParam(rowWithTtl, 'ttl_seconds'));
    batch.addPrepared(AdminArchivesByType.insertWithTtlParam(rowWithTtl, 'ttl_seconds'));
    await batch.execute();
    return withExpiry;
  }

  /** Applies the same column patch to each view, keyed by that view's primary key. */
  private async patchAllViews(archive: AdminArchive, patch: ArchiveViewPatch): Promise<AdminArchive> {
    const withExpiry = this.ensureExpiry(archive);
    const ttlSeconds = computeTtlSeconds(withExpiry.expiresAt!);
    const batch = new BatchBuilder();
    batch.addPrepared(
      AdminArchivesBySubject.patchByPkWithTtlParam(
        {
          subject_type: withExpiry.subjectType,
          subject_id: withExpiry.subjectId,
          archive_id: withExpiry.archiveId,
        },
        patch,
        'ttl_seconds',
        ttlSeconds,
      ),
    );
    batch.addPrepared(
      AdminArchivesByRequester.patchByPkWithTtlParam(
        {
          requested_by: withExpiry.requestedBy,
          archive_id: withExpiry.archiveId,
        },
        patch,
        'ttl_seconds',
        ttlSeconds,
      ),
    );
    batch.addPrepared(
      AdminArchivesByType.patchByPkWithTtlParam(
        {
          subject_type: withExpiry.subjectType,
          archive_id: withExpiry.archiveId,
        },
        patch,
        'ttl_seconds',
        ttlSeconds,
      ),
    );
    await batch.execute();
    return withExpiry;
  }

  /** Persists a brand-new archive record across all views. */
  async create(archive: AdminArchive): Promise<void> {
    const withExpiry = await this.insertAllViews(archive);
    Logger.debug(
      {subjectType: withExpiry.subjectType, subjectId: withExpiry.subjectId, archiveId: withExpiry.archiveId},
      'Created admin archive record',
    );
  }

  /** Rewrites the archive's full row across all views (same write path as create). */
  async update(archive: AdminArchive): Promise<void> {
    const withExpiry = await this.insertAllViews(archive);
    Logger.debug(
      {subjectType: withExpiry.subjectType, subjectId: withExpiry.subjectId, archiveId: withExpiry.archiveId},
      'Updated admin archive record',
    );
  }

  /** Marks the archive job as started, resetting progress to 0%. */
  async markAsStarted(archive: AdminArchive, progressStep = 'Starting archive'): Promise<void> {
    // One shared timestamp: the original wrote a fresh `new Date()` per view,
    // so the three copies of started_at could differ by a few milliseconds.
    const startedAt = new Date();
    await this.patchAllViews(archive, {
      started_at: Db.set(startedAt),
      progress_percent: Db.set(0),
      progress_step: Db.set(progressStep),
    });
  }

  /** Updates the job's progress percentage and step label. */
  async updateProgress(archive: AdminArchive, progressPercent: number, progressStep: string): Promise<void> {
    const withExpiry = await this.patchAllViews(archive, {
      progress_percent: Db.set(progressPercent),
      progress_step: Db.set(progressStep),
    });
    Logger.debug({archiveId: withExpiry.archiveId, progressPercent, progressStep}, 'Updated admin archive progress');
  }

  /** Records successful completion together with the produced artifact's location and size. */
  async markAsCompleted(
    archive: AdminArchive,
    storageKey: string,
    fileSize: bigint,
    downloadUrlExpiresAt: Date,
  ): Promise<void> {
    await this.patchAllViews(archive, {
      completed_at: Db.set(new Date()),
      storage_key: Db.set(storageKey),
      file_size: Db.set(fileSize),
      download_url_expires_at: Db.set(downloadUrlExpiresAt),
      progress_percent: Db.set(100),
      progress_step: Db.set('Completed'),
    });
  }

  /** Records a failure with its error message. */
  async markAsFailed(archive: AdminArchive, errorMessage: string): Promise<void> {
    await this.patchAllViews(archive, {
      failed_at: Db.set(new Date()),
      error_message: Db.set(errorMessage),
      progress_step: Db.set('Failed'),
    });
  }

  /** Point lookup in the by-subject view; returns null when not found. */
  async findBySubjectAndArchiveId(
    subjectType: ArchiveSubjectType,
    subjectId: bigint,
    archiveId: bigint,
  ): Promise<AdminArchive | null> {
    const query = AdminArchivesBySubject.select({
      where: [
        AdminArchivesBySubject.where.eq('subject_type'),
        AdminArchivesBySubject.where.eq('subject_id'),
        AdminArchivesBySubject.where.eq('archive_id'),
      ],
      limit: 1,
    });
    const row = await fetchOne<AdminArchiveRow>(
      query.bind({
        subject_type: subjectType,
        subject_id: subjectId,
        archive_id: archiveId,
      }),
    );
    return row ? new AdminArchive(row) : null;
  }

  /**
   * Lists archives for one subject. `limit` applies to the raw rows before
   * the expired-row filter, so fewer than `limit` items may come back.
   */
  async listBySubject(
    subjectType: ArchiveSubjectType,
    subjectId: bigint,
    limit = 20,
    includeExpired = false,
  ): Promise<Array<AdminArchive>> {
    const query = AdminArchivesBySubject.select({
      where: [AdminArchivesBySubject.where.eq('subject_type'), AdminArchivesBySubject.where.eq('subject_id')],
      limit,
    });
    const rows = await fetchMany<AdminArchiveRow>(
      query.bind({
        subject_type: subjectType,
        subject_id: subjectId,
      }),
    );
    return filterExpired(rows, includeExpired).map((row) => new AdminArchive(row));
  }

  /** Lists archives of one subject type across all subjects (see listBySubject re: limit). */
  async listByType(subjectType: ArchiveSubjectType, limit = 50, includeExpired = false): Promise<Array<AdminArchive>> {
    const query = AdminArchivesByType.select({
      where: AdminArchivesByType.where.eq('subject_type'),
      limit,
    });
    const rows = await fetchMany<AdminArchiveRow>(
      query.bind({
        subject_type: subjectType,
      }),
    );
    return filterExpired(rows, includeExpired).map((row) => new AdminArchive(row));
  }

  /** Lists archives requested by one admin user (see listBySubject re: limit). */
  async listByRequester(requestedBy: bigint, limit = 50, includeExpired = false): Promise<Array<AdminArchive>> {
    const query = AdminArchivesByRequester.select({
      where: AdminArchivesByRequester.where.eq('requested_by'),
      limit,
    });
    const rows = await fetchMany<AdminArchiveRow>(
      query.bind({
        requested_by: requestedBy,
      }),
    );
    return filterExpired(rows, includeExpired).map((row) => new AdminArchive(row));
  }
}

View File

@@ -0,0 +1,35 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import type {UserID} from '@fluxer/api/src/BrandedTypes';
import type {AdminApiKey} from '@fluxer/api/src/models/AdminApiKey';
/** Input payload for minting a new admin API key. */
export interface CreateAdminApiKeyData {
  // Human-readable label for the key.
  name: string;
  // Absolute expiry time, or null for a key that never expires.
  expiresAt: Date | null;
  // Access-control entries granted to the key.
  acls: Set<string>;
}
/**
 * Persistence contract for admin API keys. The raw key material is only
 * available at creation time; implementations are expected to store a hash.
 */
export interface IAdminApiKeyRepository {
  /** Persists a new key record owned by `createdBy`; `rawKey` is the secret to hash. */
  create(data: CreateAdminApiKeyData, createdBy: UserID, keyId: bigint, rawKey: string): Promise<AdminApiKey>;
  /** Returns the key with the given ID, or null when it does not exist. */
  findById(keyId: bigint): Promise<AdminApiKey | null>;
  /** Lists every key created by the given user. */
  listByCreator(createdBy: UserID): Promise<Array<AdminApiKey>>;
  /** Records that the key was just used. */
  updateLastUsed(keyId: bigint): Promise<void>;
  /** Permanently deletes the key identified by (createdBy, keyId). */
  revoke(keyId: bigint, createdBy: UserID): Promise<void>;
}

View File

@@ -0,0 +1,106 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import type {CassandraParam, DbOp} from '@fluxer/api/src/database/Cassandra';
import {fetchMany, fetchOne, upsertOne} from '@fluxer/api/src/database/Cassandra';
import type {SystemDmJobRow} from '@fluxer/api/src/database/types/SystemDmJobTypes';
import {SystemDmJobs} from '@fluxer/api/src/Tables';
// Partition key shared by every system-DM job row; the table is addressed by
// (job_type, job_id) — see patchJob and FETCH_JOB_BY_ID below.
const JOB_TYPE = 'system_dm';
// Built once at module load and reused for every getJob lookup.
const FETCH_JOB_BY_ID = SystemDmJobs.select({
  where: [SystemDmJobs.where.eq('job_type'), SystemDmJobs.where.eq('job_id')],
});
/** Columns of SystemDmJobRow that may be patched (everything except the primary key). */
type SystemDmJobPatchKey = Exclude<keyof SystemDmJobRow, 'job_type' | 'job_id'>;

/** `{kind: 'set'}` operations keyed by patchable column. */
type SystemDmJobPatchOps = Partial<{
  [K in SystemDmJobPatchKey]: {kind: 'set'; value: SystemDmJobRow[K]};
}>;

/**
 * Repository for system-DM delivery jobs, stored in a single Cassandra table
 * partitioned by job_type and clustered by job_id.
 */
export class SystemDmJobRepository {
  /** Inserts a new job row as-is. */
  async createJob(job: SystemDmJobRow): Promise<void> {
    await upsertOne(SystemDmJobs.insert(job));
  }

  /** Fetches a single job by ID, or null when it does not exist. */
  async getJob(jobId: bigint): Promise<SystemDmJobRow | null> {
    return fetchOne<SystemDmJobRow>(
      FETCH_JOB_BY_ID.bind({
        job_type: JOB_TYPE,
        job_id: jobId,
      }),
    );
  }

  /**
   * Lists jobs newest-first. `beforeJobId` is an exclusive pagination cursor:
   * only jobs with a strictly smaller job_id are returned.
   */
  async listJobs(limit: number, beforeJobId?: bigint): Promise<Array<SystemDmJobRow>> {
    const whereClauses = [SystemDmJobs.where.eq('job_type')];
    const params: Record<string, CassandraParam> = {job_type: JOB_TYPE};
    // Explicit undefined check: the previous truthiness test silently dropped
    // a cursor of 0n, since bigint zero is falsy.
    if (beforeJobId !== undefined) {
      whereClauses.push(SystemDmJobs.where.lt('job_id', 'before_job_id'));
      params['before_job_id'] = beforeJobId;
    }
    const stmt = SystemDmJobs.select({
      where: whereClauses,
      orderBy: {col: 'job_id', direction: 'DESC'},
      limit,
    });
    return fetchMany<SystemDmJobRow>(stmt.bind(params));
  }

  /**
   * Applies a partial update to a job. Primary-key fields and undefined
   * values are ignored; a patch that leaves nothing to write is a no-op.
   */
  async patchJob(jobId: bigint, patch: Partial<SystemDmJobRow>): Promise<void> {
    const patchOps = SystemDmJobRepository.buildPatch(patch);
    if (Object.keys(patchOps).length === 0) {
      return;
    }
    const query = SystemDmJobs.patchByPk(
      {job_type: JOB_TYPE, job_id: jobId},
      patchOps as Partial<{[K in SystemDmJobPatchKey]: DbOp<SystemDmJobRow[K]>}>,
    );
    await upsertOne(query);
  }

  /** Converts a plain partial row into `set` operations, skipping PK fields and undefineds. */
  private static buildPatch(patch: Partial<SystemDmJobRow>): SystemDmJobPatchOps {
    const patchOps: SystemDmJobPatchOps = {};
    for (const key of Object.keys(patch) as Array<keyof SystemDmJobRow>) {
      if (key === 'job_type' || key === 'job_id') {
        continue;
      }
      const value = patch[key];
      if (value === undefined) {
        continue;
      }
      const fieldKey = key as SystemDmJobPatchKey;
      (patchOps as Record<string, unknown>)[fieldKey] = {kind: 'set', value};
    }
    return patchOps;
  }
}