refactor progress

This commit is contained in:
Hampus Kraft
2026-02-17 12:22:36 +00:00
parent cb31608523
commit d5abd1a7e4
8257 changed files with 1190207 additions and 761040 deletions

View File

@@ -0,0 +1,613 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import crypto from 'node:crypto';
import fs from 'node:fs';
import net from 'node:net';
import path from 'node:path';
import {parseArgs} from 'node:util';
import cassandra from 'cassandra-driver';
// Table that records which migrations have already been applied.
const MIGRATION_TABLE = 'schema_migrations';
// Keyspace holding the migration-tracking table; overridable via env.
const MIGRATION_KEYSPACE = process.env['CASSANDRA_KEYSPACE'] ?? 'fluxer';
// Location of the .cql migration files, relative to the working directory.
const MIGRATIONS_DIR = '../fluxer_devops/cassandra/migrations';

/** A disallowed CQL construct and the error text reported when it matches. */
interface ForbiddenPattern {
  pattern: RegExp;
  message: string;
}

// CQL constructs rejected by validation. The tool is forward-only, so
// destructive statements (DROP/TRUNCATE) and index/view features are banned.
const FORBIDDEN_PATTERNS: Array<ForbiddenPattern> = [
  {pattern: /\bCREATE\s+INDEX\b/i, message: 'Secondary indexes are forbidden (CREATE INDEX)'},
  {pattern: /\bCREATE\s+CUSTOM\s+INDEX\b/i, message: 'Custom indexes are forbidden (CREATE CUSTOM INDEX)'},
  {
    pattern: /\bCREATE\s+MATERIALIZED\s+VIEW\b/i,
    message: 'Materialized views are forbidden (CREATE MATERIALIZED VIEW)',
  },
  {pattern: /\bDROP\s+TABLE\b/i, message: 'DROP TABLE is forbidden'},
  {pattern: /\bDROP\s+KEYSPACE\b/i, message: 'DROP KEYSPACE is forbidden'},
  {pattern: /\bDROP\s+TYPE\b/i, message: 'DROP TYPE is forbidden'},
  {pattern: /\bDROP\s+INDEX\b/i, message: 'DROP INDEX is forbidden'},
  {pattern: /\bDROP\s+MATERIALIZED\s+VIEW\b/i, message: 'DROP MATERIALIZED VIEW is forbidden'},
  {pattern: /\bDROP\s+COLUMN\b/i, message: 'DROP COLUMN is forbidden (use ALTER TABLE ... DROP ...)'},
  {pattern: /\bTRUNCATE\b/i, message: 'TRUNCATE is forbidden'},
];
/** Returns the directory that holds the CQL migration files. */
function getMigrationsDir(): string {
  return MIGRATIONS_DIR;
}

/** Builds the path to a single migration file inside the migrations directory. */
function getMigrationPath(filename: string): string {
  const dir = getMigrationsDir();
  return path.join(dir, filename);
}
/**
 * Normalizes a human-supplied migration name into a safe snake_case token:
 * spaces/hyphens become underscores, everything outside [a-z0-9_] is dropped,
 * runs of underscores collapse, and leading/trailing underscores are trimmed.
 */
function sanitizeName(name: string): string {
  const normalized = name.toLowerCase().replace(/[ -]/g, '_');
  const filtered = normalized.replace(/[^a-z0-9_]/g, '');
  const collapsed = filtered.replace(/_+/g, '_');
  return collapsed.replace(/^_+|_+$/g, '');
}
/**
 * Strips CQL line comments ('--' to end of line), trims each line, and drops
 * blank lines, returning the remaining lines joined by newlines.
 */
function removeComments(content: string): string {
  const kept: Array<string> = [];
  for (const rawLine of content.split('\n')) {
    const commentIdx = rawLine.indexOf('--');
    const withoutComment = commentIdx === -1 ? rawLine : rawLine.slice(0, commentIdx);
    const trimmed = withoutComment.trim();
    if (trimmed.length > 0) {
      kept.push(trimmed);
    }
  }
  return kept.join('\n');
}
/**
 * Splits CQL content into individual statements. Line comments are stripped,
 * blank lines skipped, and lines are accumulated (space-joined) until one
 * ends with ';'. Any trailing unterminated text becomes a final statement.
 */
function parseStatements(content: string): Array<string> {
  const statements: Array<string> = [];
  let buffer: Array<string> = [];
  for (const rawLine of content.split('\n')) {
    const commentIdx = rawLine.indexOf('--');
    const code = (commentIdx === -1 ? rawLine : rawLine.slice(0, commentIdx)).trim();
    if (code.length === 0) {
      continue;
    }
    buffer.push(code);
    if (code.endsWith(';')) {
      statements.push(buffer.join(' '));
      buffer = [];
    }
  }
  if (buffer.length > 0) {
    statements.push(buffer.join(' '));
  }
  return statements;
}
/** Computes the hex MD5 digest used to fingerprint a migration's content. */
function calculateChecksum(content: string): string {
  const hash = crypto.createHash('md5');
  hash.update(content);
  return hash.digest('hex');
}
/**
 * Validates one migration's content against the forbidden-CQL patterns and
 * rejects empty files. Comments are stripped first so commented-out CQL never
 * trips a pattern. Returns human-readable error strings; empty array = valid.
 */
function validateMigrationContent(filename: string, content: string): Array<string> {
  const errors: Array<string> = [];
  const cleanContent = removeComments(content);
  for (const forbidden of FORBIDDEN_PATTERNS) {
    if (forbidden.pattern.test(cleanContent)) {
      // Fixed: messages previously contained a literal "$(unknown)"
      // placeholder instead of the offending filename.
      errors.push(` ${filename}: ${forbidden.message}`);
    }
  }
  if (cleanContent.trim().length === 0) {
    errors.push(` ${filename}: migration file is empty`);
  }
  return errors;
}
function getMigrationFiles(): Array<string> {
const migrationsDir = getMigrationsDir();
if (!fs.existsSync(migrationsDir)) {
return [];
}
const files = fs.readdirSync(migrationsDir);
const migrations = files
.filter((file) => {
const filePath = path.join(migrationsDir, file);
return fs.statSync(filePath).isFile() && file.endsWith('.cql');
})
.sort();
return migrations;
}
/**
 * True when any of the first 10 lines of the migration file carries a
 * '-- skip ci' (or '--skip ci') annotation, case-insensitively.
 */
function hasSkipCi(filename: string): boolean {
  const content = fs.readFileSync(getMigrationPath(filename), 'utf-8');
  return content
    .split('\n')
    .slice(0, 10)
    .some((line) => {
      const lower = line.trim().toLowerCase();
      return lower.includes('-- skip ci') || lower.includes('--skip ci');
    });
}
/** Resolves after roughly `ms` milliseconds (setTimeout-based delay). */
async function sleep(ms: number): Promise<void> {
  await new Promise<void>((resolve) => {
    setTimeout(resolve, ms);
  });
}
/**
 * Connects to Cassandra with up to 5 attempts, waiting 10s between failures.
 * Each failure is logged; the final error is surfaced in the thrown message.
 *
 * @returns a connected cassandra-driver Client.
 * @throws Error when every attempt fails.
 */
async function createSession(
  host: string,
  port: number,
  username: string,
  password: string,
): Promise<cassandra.Client> {
  const attemptLimit = 5;
  const delayBetweenAttemptsMs = 10000;
  let lastFailure: Error | null = null;
  for (let attempt = 1; attempt <= attemptLimit; attempt++) {
    if (attempt > 1) {
      console.log(`Retrying connection (attempt ${attempt}/${attemptLimit})...`);
    }
    try {
      const client = new cassandra.Client({
        contactPoints: [`${host}:${port}`],
        localDataCenter: 'dc1',
        credentials: {username, password},
        socketOptions: {connectTimeout: 60000},
      });
      await client.connect();
      return client;
    } catch (e) {
      lastFailure = e instanceof Error ? e : new Error(String(e));
      console.log(`Connection attempt ${attempt}/${attemptLimit} failed: ${lastFailure.message}`);
      if (attempt < attemptLimit) {
        await sleep(delayBetweenAttemptsMs);
      }
    }
  }
  throw new Error(`Failed to connect to Cassandra after ${attemptLimit} attempts: ${lastFailure?.message}`);
}
/**
 * Reads the migration-tracking table and returns a map of
 * filename -> applied_at timestamp for every recorded migration.
 */
async function getAppliedMigrations(session: cassandra.Client): Promise<Map<string, Date>> {
  const result = await session.execute(`SELECT filename, applied_at FROM ${MIGRATION_KEYSPACE}.${MIGRATION_TABLE}`);
  const applied = new Map<string, Date>();
  for (const row of result.rows) {
    applied.set(row.filename as string, row.applied_at as Date);
  }
  return applied;
}
/**
 * Executes each CQL statement of a migration file in order, then records the
 * migration (filename, timestamp, MD5 checksum) in the tracking table.
 *
 * Cassandra DDL is not transactional: a mid-file failure leaves earlier
 * statements applied and the migration unrecorded (use `ack` after repair).
 *
 * @throws Error if the file contains no executable statements.
 */
async function applyMigration(session: cassandra.Client, filename: string): Promise<void> {
  // Fixed: previously logged a literal "$(unknown)" instead of the filename.
  console.log(`Applying migration: ${filename}`);
  const content = fs.readFileSync(getMigrationPath(filename), 'utf-8');
  const statements = parseStatements(content);
  if (statements.length === 0) {
    throw new Error('No valid statements found in migration');
  }
  console.log(` Executing ${statements.length} statement(s)...`);
  for (let i = 0; i < statements.length; i++) {
    console.log(` [${i + 1}/${statements.length}] Executing...`);
    await session.execute(statements[i]);
  }
  const checksum = calculateChecksum(content);
  await session.execute(
    `INSERT INTO ${MIGRATION_KEYSPACE}.${MIGRATION_TABLE} (filename, applied_at, checksum) VALUES (?, ?, ?)`,
    [filename, new Date(), checksum],
  );
  console.log(' \u2713 Migration applied successfully');
}
/**
 * Records a migration as applied (with its checksum) WITHOUT executing its
 * statements — used for files annotated '-- skip ci'.
 */
async function autoAcknowledgeMigration(session: cassandra.Client, filename: string): Promise<void> {
  const filePath = getMigrationPath(filename);
  const checksum = calculateChecksum(fs.readFileSync(filePath, 'utf-8'));
  await session.execute(
    `INSERT INTO ${MIGRATION_KEYSPACE}.${MIGRATION_TABLE} (filename, applied_at, checksum) VALUES (?, ?, ?)`,
    [filename, new Date(), checksum],
  );
}
/**
 * Creates a new, empty migration file named `<timestamp>_<sanitized-name>.cql`
 * in the migrations directory.
 *
 * @throws Error when the name sanitizes to nothing or the file already exists.
 */
function createMigration(name: string): void {
  const sanitized = sanitizeName(name);
  if (sanitized.length === 0) {
    throw new Error(`Invalid migration name: ${name}`);
  }
  // UTC timestamp as YYYYMMDDHHmmss, derived from the ISO-8601 string.
  const timestamp = new Date().toISOString().replace(/[-:T]/g, '').slice(0, 14);
  const filename = `${timestamp}_${sanitized}.cql`;
  const filepath = getMigrationPath(filename);
  if (fs.existsSync(filepath)) {
    // Fixed: this message previously printed a literal "$(unknown)".
    throw new Error(`Migration file already exists: ${filename}`);
  }
  fs.writeFileSync(filepath, '');
  console.log(`\u2713 Created migration: ${filename}`);
  console.log(` Path: ${filepath}`);
}
/**
 * Validates every migration file on disk against the forbidden-CQL rules,
 * logging a check mark per valid file.
 *
 * @throws Error when any file fails validation (after listing all errors).
 */
function checkMigrations(): void {
  const migrations = getMigrationFiles();
  if (migrations.length === 0) {
    console.log('No migration files found');
    return;
  }
  console.log(`Checking ${migrations.length} migration file(s)...\n`);
  const allErrors: Array<string> = [];
  let passedCount = 0;
  for (const migration of migrations) {
    const fileContent = fs.readFileSync(getMigrationPath(migration), 'utf-8');
    const fileErrors = validateMigrationContent(migration, fileContent);
    if (fileErrors.length > 0) {
      allErrors.push(...fileErrors);
    } else {
      passedCount++;
      console.log(`\u2713 ${migration}`);
    }
  }
  if (allErrors.length > 0) {
    console.log('\nValidation errors:');
    for (const err of allErrors) {
      console.log(`\u2717 ${err}`);
    }
    throw new Error(`Validation failed with ${allErrors.length} error(s)`);
  }
  console.log(`\n\u2713 All ${passedCount} migration(s) are valid!`);
}
/**
 * Applies all pending migrations in filename order.
 *
 * Flow: connect, diff files on disk against the tracking table, record any
 * unapplied file carrying a '-- skip ci' annotation as applied without
 * executing it, then execute the remaining pending files sequentially.
 */
async function runMigrations(host: string, port: number, username: string, password: string): Promise<void> {
  console.log('Starting Cassandra migration process...');
  console.log(`Host: ${host}, Port: ${port}`);
  const session = await createSession(host, port, username, password);
  try {
    const migrations = getMigrationFiles();
    const applied = await getAppliedMigrations(session);
    if (migrations.length === 0) {
      console.log('No migration files found');
      return;
    }
    // Partition the unapplied files into skipped ('-- skip ci') vs pending.
    const pending: Array<string> = [];
    const skipped: Array<string> = [];
    for (const migration of migrations) {
      if (!applied.has(migration)) {
        if (hasSkipCi(migration)) {
          skipped.push(migration);
        } else {
          pending.push(migration);
        }
      }
    }
    if (skipped.length > 0) {
      console.log(`Found ${skipped.length} migration(s) with '-- skip ci' annotation:`);
      for (const migration of skipped) {
        console.log(` - ${migration}`);
      }
      console.log('\nAuto-acknowledging skipped migrations...');
      // Mark skipped files as applied without running their statements.
      for (const migration of skipped) {
        await autoAcknowledgeMigration(session, migration);
        console.log(` \u2713 Acknowledged: ${migration}`);
      }
      console.log();
    }
    if (pending.length === 0) {
      console.log('\u2713 No pending migrations');
      return;
    }
    console.log(`Found ${pending.length} pending migration(s) to apply:`);
    for (const migration of pending) {
      console.log(` - ${migration}`);
    }
    console.log();
    const pendingCount = pending.length;
    // Sequential on purpose: applyMigration throws on failure, aborting the run.
    for (const migration of pending) {
      await applyMigration(session, migration);
    }
    console.log(`\u2713 Successfully applied ${pendingCount} migration(s)`);
  } finally {
    // Always close the driver connection, even when a migration failed.
    await session.shutdown();
  }
}
/**
 * Prints a read-only summary of applied vs pending migrations, marking each
 * file with [✓]/[ ] and flagging '-- skip ci' files.
 */
async function showStatus(host: string, port: number, username: string, password: string): Promise<void> {
  const session = await createSession(host, port, username, password);
  try {
    const migrations = getMigrationFiles();
    const applied = await getAppliedMigrations(session);
    console.log('Migration Status');
    console.log('================\n');
    console.log(`Total migrations: ${migrations.length}`);
    console.log(`Applied: ${applied.size}`);
    console.log(`Pending: ${migrations.length - applied.size}\n`);
    if (migrations.length === 0) {
      return;
    }
    console.log('Migrations:');
    for (const migration of migrations) {
      const marker = applied.has(migration) ? '[\u2713]' : '[ ]';
      const annotation = hasSkipCi(migration) ? ' (skip ci)' : '';
      console.log(` ${marker} ${migration}${annotation}`);
    }
  } finally {
    await session.shutdown();
  }
}
/**
 * Manually records a migration as applied without executing it — the escape
 * hatch for a migration that failed partway and was repaired by hand.
 *
 * @throws Error when the migration is already recorded as applied.
 */
async function acknowledgeMigration(
  host: string,
  port: number,
  username: string,
  password: string,
  filename: string,
): Promise<void> {
  const session = await createSession(host, port, username, password);
  try {
    const applied = await getAppliedMigrations(session);
    if (applied.has(filename)) {
      // Fixed: this message previously printed a literal "$(unknown)".
      throw new Error(`Migration ${filename} is already applied`);
    }
    const content = fs.readFileSync(getMigrationPath(filename), 'utf-8');
    const checksum = calculateChecksum(content);
    await session.execute(
      `INSERT INTO ${MIGRATION_KEYSPACE}.${MIGRATION_TABLE} (filename, applied_at, checksum) VALUES (?, ?, ?)`,
      [filename, new Date(), checksum],
    );
    console.log(`\u2713 Migration acknowledged: ${filename}`);
  } finally {
    await session.shutdown();
  }
}
/**
 * Smoke-tests connectivity by opening a session and querying the server
 * version from system.local; the session is always shut down afterwards.
 */
async function testConnection(host: string, port: number, username: string, password: string): Promise<void> {
  console.log(`Testing Cassandra connection to ${host}:${port}...`);
  const session = await createSession(host, port, username, password);
  try {
    const result = await session.execute('SELECT release_version FROM system.local');
    const firstRow = result.rows[0];
    if (firstRow) {
      console.log(`\u2713 Connection successful - Cassandra version: ${firstRow.release_version}`);
    } else {
      console.log('\u2713 Connection successful');
    }
  } finally {
    await session.shutdown();
  }
}
/**
 * Three-stage connection diagnostic: raw TCP reachability, driver session
 * creation, then a trivial query. Each stage logs elapsed seconds; the first
 * failing stage rethrows so the caller/CI can see exactly where it broke.
 */
async function debugConnection(host: string, port: number, username: string, password: string): Promise<void> {
  console.log('=== Cassandra Connection Debug ===');
  console.log(`Host: ${host}:${port}`);
  console.log(`Username: ${username}`);
  console.log('\n[1/3] Testing TCP connectivity...');
  const tcpStart = performance.now();
  try {
    // Bare socket connect with a 5s timeout, independent of the driver —
    // distinguishes network problems from driver/auth problems.
    await new Promise<void>((resolve, reject) => {
      const socket = new net.Socket();
      const timeout = setTimeout(() => {
        socket.destroy();
        reject(new Error('TCP connection timed out'));
      }, 5000);
      socket.connect(port, host, () => {
        clearTimeout(timeout);
        socket.destroy();
        resolve();
      });
      socket.on('error', (err) => {
        clearTimeout(timeout);
        reject(err);
      });
    });
    console.log(` \u2713 TCP connection successful (${((performance.now() - tcpStart) / 1000).toFixed(2)}s)`);
  } catch (e) {
    console.log(` \u2717 TCP connection failed: ${e instanceof Error ? e.message : String(e)}`);
    throw e;
  }
  console.log('\n[2/3] Creating Cassandra session...');
  const sessionStart = performance.now();
  let session: cassandra.Client;
  try {
    session = await createSession(host, port, username, password);
    console.log(` \u2713 Session created (${((performance.now() - sessionStart) / 1000).toFixed(2)}s)`);
  } catch (e) {
    console.log(` \u2717 Session creation failed: ${e instanceof Error ? e.message : String(e)}`);
    throw e;
  }
  try {
    console.log('\n[3/3] Testing queries...');
    const queryStart = performance.now();
    const result = await session.execute('SELECT release_version FROM system.local');
    if (result.rows.length > 0) {
      const version = result.rows[0].release_version;
      console.log(` \u2713 Cassandra version: ${version} (${((performance.now() - queryStart) / 1000).toFixed(2)}s)`);
    } else {
      console.log(` \u2713 Query successful (${((performance.now() - queryStart) / 1000).toFixed(2)}s)`);
    }
    console.log('\n\u2713 All debug checks passed');
  } finally {
    // The session only exists past stage 2, so shutdown lives in this block.
    await session.shutdown();
  }
}
/** Prints the CLI help text (commands and options) to stdout. */
function printUsage(): void {
  console.log(`cassandra-migrate - Forward-only Cassandra migration tool for Fluxer
A simple, forward-only migration tool for Cassandra.
Migrations are stored in fluxer_devops/cassandra/migrations.
Migration metadata is stored in the 'fluxer' keyspace.
USAGE:
tsx scripts/CassandraMigrate.tsx <command> [options]
COMMANDS:
create <name> Create a new migration file
check Validate all migration files
up Run pending migrations
ack <filename> Acknowledge a failed migration to skip it
status Show migration status
test Test Cassandra connection
debug Debug Cassandra connection
OPTIONS:
--host <host> Cassandra host (default: CASSANDRA_HOST env or localhost)
--port <port> Cassandra port (default: 9042)
--username <user> Cassandra username (default: CASSANDRA_USERNAME env or cassandra)
--password <pass> Cassandra password (default: CASSANDRA_PASSWORD env or cassandra)
--help Show this help message
`);
}
/**
 * CLI entry point: parses flags and positionals with node:util parseArgs,
 * dispatches to the subcommand, and converts any thrown error into a
 * non-zero process exit.
 */
async function main(): Promise<void> {
  const {values, positionals} = parseArgs({
    allowPositionals: true,
    options: {
      // Connection defaults come from env vars with local-dev fallbacks.
      host: {type: 'string', default: process.env['CASSANDRA_HOST'] ?? 'localhost'},
      port: {type: 'string', default: '9042'},
      username: {type: 'string', default: process.env['CASSANDRA_USERNAME'] ?? 'cassandra'},
      password: {type: 'string', default: process.env['CASSANDRA_PASSWORD'] ?? 'cassandra'},
      help: {type: 'boolean', default: false},
    },
  });
  // --help exits 0; a missing command is a usage error and exits 1.
  if (values.help || positionals.length === 0) {
    printUsage();
    process.exit(values.help ? 0 : 1);
  }
  const command = positionals[0];
  const host = values.host;
  const port = parseInt(values.port, 10);
  const username = values.username;
  const password = values.password;
  try {
    switch (command) {
      case 'create': {
        const name = positionals[1];
        if (!name) {
          console.error('Error: Migration name is required');
          process.exit(1);
        }
        createMigration(name);
        break;
      }
      case 'check':
        // check is offline: validates files without connecting.
        checkMigrations();
        break;
      case 'up':
        await runMigrations(host, port, username, password);
        break;
      case 'ack': {
        const filename = positionals[1];
        if (!filename) {
          console.error('Error: Migration filename is required');
          process.exit(1);
        }
        await acknowledgeMigration(host, port, username, password, filename);
        break;
      }
      case 'status':
        await showStatus(host, port, username, password);
        break;
      case 'test':
        await testConnection(host, port, username, password);
        break;
      case 'debug':
        await debugConnection(host, port, username, password);
        break;
      default:
        console.error(`Unknown command: ${command}`);
        printUsage();
        process.exit(1);
    }
  } catch (e) {
    // Surface the failure message and signal failure to the shell/CI.
    console.error(`Error: ${e instanceof Error ? e.message : String(e)}`);
    process.exit(1);
  }
}
main();

View File

@@ -0,0 +1,13 @@
# Migration-runner image: executes the forward-only Cassandra migration tool.
FROM node:24-bookworm-slim
WORKDIR /app
# Pin pnpm through corepack so installs are reproducible.
RUN corepack enable && corepack prepare pnpm@10.26.0 --activate
# Minimal package.json with just the driver and the tsx runner.
# NOTE(review): no lockfile is used, so transitive deps are not pinned — confirm acceptable.
RUN echo '{"type":"module","dependencies":{"cassandra-driver":"4.8.0","tsx":"4.21.0"}}' > package.json && \
pnpm install
COPY fluxer_api/scripts/CassandraMigrate.tsx ./scripts/
COPY fluxer_api/tsconfig.json ./
ENTRYPOINT ["npx", "tsx", "scripts/CassandraMigrate.tsx"]

View File

@@ -0,0 +1,412 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import fs from 'node:fs';
import path from 'node:path';
// AGPL header template for block-comment languages (TS/JS/C-family, etc.).
// '{year}' is substituted with the current year when the header is written.
const TS_LICENSE_HEADER = `/*
* Copyright (C) {year} Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/`;
// Same header, using Erlang's '%%' line-comment style.
const ERLANG_LICENSE_HEADER = `%% Copyright (C) {year} Fluxer Contributors
%%
%% This file is part of Fluxer.
%%
%% Fluxer is free software: you can redistribute it and/or modify
%% it under the terms of the GNU Affero General Public License as published by
%% the Free Software Foundation, either version 3 of the License, or
%% (at your option) any later version.
%%
%% Fluxer is distributed in the hope that it will be useful,
%% but WITHOUT ANY WARRANTY; without even the implied warranty of
%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
%% GNU Affero General Public License for more details.
%%
%% You should have received a copy of the GNU Affero General Public License
%% along with Fluxer. If not, see <https://www.gnu.org/licenses/>.`;
// Same header, using '#' line comments (shell, Python, Ruby, PowerShell...).
const SHELL_LICENSE_HEADER = `# Copyright (C) {year} Fluxer Contributors
#
# This file is part of Fluxer.
#
# Fluxer is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Fluxer is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Fluxer. If not, see <https://www.gnu.org/licenses/>.`;
// File extensions that take the /* ... */ block-comment header.
const BLOCK_COMMENT_EXTS = new Set([
  'ts',
  'tsx',
  'js',
  'jsx',
  'mjs',
  'cjs',
  'css',
  'go',
  'rs',
  'c',
  'cc',
  'cpp',
  'cxx',
  'h',
  'hh',
  'hpp',
  'hxx',
  'mm',
  'm',
  'java',
  'kt',
  'kts',
  'swift',
  'scala',
  'dart',
  'cs',
  'fs',
]);
// File extensions that take '#'-prefixed line-comment headers.
const HASH_LINE_EXTS = new Set(['sh', 'bash', 'zsh', 'py', 'rb', 'ps1', 'psm1', 'psd1', 'ksh', 'fish']);
// How a header is written: one block comment, or a run of prefixed lines.
type HeaderStyle = {kind: 'block'} | {kind: 'line'; prefix: string};
/** Header text plus the comment style used to recognize/strip it. */
interface FileTemplate {
  header: string;
  style: HeaderStyle;
}
/**
 * Walks the repository (rooted one directory up) and ensures every recognized
 * source file carries the Fluxer AGPL license header for the current year.
 *
 * Per file: add a header if none is detected; otherwise strip the existing
 * header and re-add a normalized one; if stripping fails but a copyright
 * year was detected, patch just the year.
 */
class Processor {
  // Year substituted into headers being written.
  private currentYear: number;
  // Number of files modified so far.
  private updated: number = 0;
  // Non-comment, non-empty lines read from ../.gitignore.
  private ignorePatterns: Array<string> = [];

  constructor() {
    this.currentYear = new Date().getFullYear();
    this.loadGitignore();
  }

  /** Loads ignore patterns from ../.gitignore; a missing file is non-fatal. */
  private loadGitignore(): void {
    try {
      const content = fs.readFileSync('../.gitignore', 'utf-8');
      for (const line of content.split('\n')) {
        const trimmed = line.trim();
        if (trimmed.length > 0 && !trimmed.startsWith('#')) {
          this.ignorePatterns.push(trimmed);
        }
      }
    } catch {
      console.error('Warning: Could not read .gitignore file, proceeding without ignore patterns');
    }
  }

  /** True when a path must be skipped (fluxer_static or any ignore pattern). */
  private shouldIgnore(filePath: string): boolean {
    if (filePath.includes('fluxer_static')) {
      return true;
    }
    for (const pattern of this.ignorePatterns) {
      if (this.matchPattern(pattern, filePath)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Simplified gitignore-style matching: handles '**' + '/' prefixes,
   * trailing '/' (directory), leading '/' (root-anchored), and bare names
   * matched against any path segment or the basename.
   * NOTE(review): wildcard globs such as '*.log' are NOT expanded — they
   * only match literally; confirm this is acceptable for this repo.
   */
  private matchPattern(pattern: string, filePath: string): boolean {
    const sep = path.sep;
    if (pattern.startsWith('**/')) {
      const subPattern = pattern.slice(3);
      if (subPattern.endsWith('/')) {
        const dirName = subPattern.slice(0, -1);
        return filePath.split(sep).some((part) => part === dirName);
      }
      return filePath.split(sep).some((part) => part === subPattern);
    }
    if (pattern.endsWith('/')) {
      const dirPattern = pattern.slice(0, -1);
      return filePath.split(sep).some((part) => part === dirPattern) || filePath.startsWith(`${dirPattern}${sep}`);
    }
    if (pattern.startsWith('/')) {
      return filePath === pattern.slice(1);
    }
    const parts = filePath.split(sep);
    const fileName = path.basename(filePath);
    return parts.some((part) => part === pattern) || fileName === pattern;
  }

  /** Picks the header template for a file by extension, or null if unsupported. */
  private getTemplate(filePath: string): FileTemplate | null {
    const ext = path.extname(filePath).slice(1).toLowerCase();
    return this.templateForExtension(ext);
  }

  /** Maps an extension to a header template and comment style. */
  private templateForExtension(ext: string): FileTemplate | null {
    if (BLOCK_COMMENT_EXTS.has(ext)) {
      return {header: TS_LICENSE_HEADER, style: {kind: 'block'}};
    }
    if (HASH_LINE_EXTS.has(ext)) {
      return {header: SHELL_LICENSE_HEADER, style: {kind: 'line', prefix: '#'}};
    }
    switch (ext) {
      case 'erl':
      case 'hrl':
        return {header: ERLANG_LICENSE_HEADER, style: {kind: 'line', prefix: '%%'}};
      default:
        return null;
    }
  }

  /**
   * Scans the first 25 lines: a header counts as present only when both an
   * AGPL mention and 'fluxer' appear. Also extracts the first 20xx year on a
   * line containing both 'copyright' and 'fluxer'.
   */
  private detectLicense(content: string): {hasHeader: boolean; detectedYear: number | null} {
    const lines = content.split('\n').slice(0, 25);
    let hasAgpl = false;
    let hasFluxer = false;
    let detectedYear: number | null = null;
    const yearRegex = /\b(20\d{2})\b/;
    for (const line of lines) {
      const lower = line.toLowerCase();
      if (lower.includes('gnu affero general public license') || lower.includes('agpl')) {
        hasAgpl = true;
      }
      if (lower.includes('fluxer')) {
        hasFluxer = true;
      }
      if (lower.includes('copyright') && lower.includes('fluxer') && detectedYear === null) {
        const match = line.match(yearRegex);
        if (match) {
          const year = parseInt(match[1], 10);
          if (year >= 1900 && year < 3000) {
            detectedYear = year;
          }
        }
      }
    }
    return {hasHeader: hasAgpl && hasFluxer, detectedYear};
  }

  /**
   * Rewrites the old year to the current one.
   * NOTE(review): String.replace with a string pattern substitutes only the
   * FIRST occurrence of those digits anywhere in the file — if they appear
   * before the copyright line, the wrong text is rewritten; confirm intended.
   */
  private updateYear(content: string, oldYear: number): string {
    return content.replace(oldYear.toString(), this.currentYear.toString());
  }

  /**
   * Removes the leading license header (block comment or run of prefixed
   * lines), preserving a shebang and the file's trailing-newline state.
   * Returns success=false (content untouched) when the file does not start
   * with the expected comment shape.
   */
  private stripLicenseHeader(content: string, style: HeaderStyle): {stripped: string; success: boolean} {
    const lines = content.split('\n');
    if (lines.length === 0) {
      return {stripped: content, success: false};
    }
    let prefixEnd = 0;
    // Keep a shebang on line one; the header is expected after it.
    if (lines[0]?.startsWith('#!')) {
      prefixEnd = 1;
    }
    let headerStart = prefixEnd;
    while (headerStart < lines.length && lines[headerStart].trim().length === 0) {
      headerStart++;
    }
    if (headerStart >= lines.length) {
      return {stripped: content, success: false};
    }
    const originalEnding = content.endsWith('\n');
    let afterIdx: number;
    if (style.kind === 'block') {
      const first = lines[headerStart].trimStart();
      if (!first.startsWith('/*')) {
        return {stripped: content, success: false};
      }
      let headerEnd = headerStart;
      let foundEnd = false;
      for (let i = headerStart; i < lines.length; i++) {
        if (lines[i].includes('*/')) {
          headerEnd = i;
          foundEnd = true;
          break;
        }
      }
      if (!foundEnd) {
        return {stripped: content, success: false};
      }
      afterIdx = headerEnd + 1;
      // Also drop blank lines that followed the header.
      while (afterIdx < lines.length && lines[afterIdx].trim().length === 0) {
        afterIdx++;
      }
    } else {
      const prefix = style.prefix;
      const first = lines[headerStart].trimStart();
      if (!first.startsWith(prefix)) {
        return {stripped: content, success: false};
      }
      let headerEnd = headerStart;
      // Consume the contiguous run of prefixed lines; stop at blank or code.
      while (headerEnd < lines.length) {
        const trimmed = lines[headerEnd].trimStart();
        if (trimmed.length === 0) {
          break;
        }
        if (trimmed.startsWith(prefix)) {
          headerEnd++;
          continue;
        }
        break;
      }
      afterIdx = headerEnd;
      while (afterIdx < lines.length && lines[afterIdx].trim().length === 0) {
        afterIdx++;
      }
    }
    const newLines = [...lines.slice(0, prefixEnd), ...lines.slice(afterIdx)];
    let result = newLines.join('\n');
    if (originalEnding && !result.endsWith('\n')) {
      result += '\n';
    }
    return {stripped: result, success: true};
  }

  /** Prepends the year-substituted header, keeping any shebang on line one. */
  private addHeader(content: string, template: FileTemplate): string {
    const header = template.header.replace('{year}', this.currentYear.toString());
    const firstLine = content.split('\n')[0];
    if (firstLine?.startsWith('#!')) {
      const rest = content.split('\n').slice(1).join('\n');
      return `${firstLine}\n\n${header}\n\n${rest}`;
    }
    return `${header}\n\n${content}`;
  }

  /** Applies the add/normalize/update-year decision to one file and logs it. */
  private processFile(filePath: string): void {
    const content = fs.readFileSync(filePath, 'utf-8');
    const template = this.getTemplate(filePath);
    if (!template) {
      return;
    }
    const {hasHeader, detectedYear} = this.detectLicense(content);
    let newContent: string;
    let action: string;
    if (!hasHeader) {
      newContent = this.addHeader(content, template);
      action = 'Added header';
    } else {
      // Header exists: prefer strip-and-rewrite; fall back to a year patch.
      const {stripped, success} = this.stripLicenseHeader(content, template.style);
      if (success) {
        newContent = this.addHeader(stripped, template);
        action = 'Normalized header';
      } else if (detectedYear !== null) {
        if (detectedYear === this.currentYear) {
          // Already current; nothing to do.
          return;
        }
        newContent = this.updateYear(content, detectedYear);
        action = `Updated year ${detectedYear} \u2192 ${this.currentYear}`;
      } else {
        // Header present but unparseable and no year found: leave untouched.
        return;
      }
    }
    fs.writeFileSync(filePath, newContent);
    this.updated++;
    console.log(`${action}: ${filePath}`);
  }

  /** Recursively visits a directory, honoring the ignore rules. */
  private walkDir(dir: string): void {
    let entries: Array<fs.Dirent>;
    try {
      entries = fs.readdirSync(dir, {withFileTypes: true});
    } catch {
      // Unreadable directory: skip silently.
      return;
    }
    for (const entry of entries) {
      const fullPath = path.join(dir, entry.name);
      // Ignore rules are evaluated against paths relative to the repo root.
      const relativePath = path.relative('..', fullPath);
      if (this.shouldIgnore(relativePath)) {
        continue;
      }
      if (entry.isDirectory()) {
        this.walkDir(fullPath);
      } else if (entry.isFile()) {
        const template = this.getTemplate(fullPath);
        if (template) {
          try {
            this.processFile(fullPath);
          } catch (e) {
            console.error(`Error processing ${fullPath}: ${e instanceof Error ? e.message : String(e)}`);
          }
        }
      }
    }
  }

  /** Entry point: processes everything under the repository root ('..'). */
  walk(): void {
    this.walkDir('..');
  }

  /** Number of files modified by the last walk. */
  getUpdatedCount(): number {
    return this.updated;
  }
}
function main(): void {
const processor = new Processor();
processor.walk();
console.log(`Updated ${processor.getUpdatedCount()} files`);
}
main();