Cleanup batch 2: Delete unused scripts

Removed scripts identified by knip as unused:
- scripts/generateDefaultLevels.cjs
- scripts/manageAdmin.ts
- scripts/runMigration.ts
- scripts/seedLevels.ts
- scripts/exportBlend.ts (had unresolved import)

Kept: seedLeaderboard.ts (still in use)

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
Michael Mainguy 2025-11-28 17:42:55 -06:00
parent 8570c22a0c
commit 917cf7a120
5 changed files with 0 additions and 1057 deletions

View File

@@ -1,211 +0,0 @@
#!/usr/bin/env tsx
/**
* CLI script to export Blender files to GLB format
*
* Usage:
* tsx scripts/exportBlend.ts <input.blend> <output.glb>
* npm run export-blend -- <input.blend> <output.glb>
*
* Examples:
* npm run export-blend -- public/ship1.blend public/ship1.glb
* npm run export-blend -- public/asteroid4.blend public/asteroid4.glb
*
* Options:
* --watch Watch the input file and auto-export on changes
* --compress Enable Draco mesh compression
* --no-modifiers Don't apply modifiers
* --batch Export all .blend files in a directory
*/
import { exportBlendToGLB, watchAndExport, batchExportBlendToGLB } from '../src/utils/blenderExporter.js';
import { readdirSync, statSync } from 'fs';
import path from 'path';
/** Parsed command-line options for the Blender export CLI. */
interface CLIArgs {
  /** Input .blend file, or the input directory in --batch mode. */
  input?: string;
  /** Output .glb file, or the output directory in --batch mode. */
  output?: string;
  /** Re-export automatically whenever the input file changes. */
  watch: boolean;
  /** Enable Draco mesh compression in the export parameters. */
  compress: boolean;
  /** Skip applying modifiers during export. */
  noModifiers: boolean;
  /** Export every .blend file found in the input directory. */
  batch: boolean;
}
/**
 * Parse process.argv into a CLIArgs structure.
 *
 * Recognized boolean flags toggle their option; the first two non-flag
 * tokens become `input` and `output`. Extra positional tokens are ignored.
 */
function parseArgs(): CLIArgs {
  const parsed: CLIArgs = {
    watch: false,
    compress: false,
    noModifiers: false,
    batch: false
  };

  for (const token of process.argv.slice(2)) {
    switch (token) {
      case '--watch':
        parsed.watch = true;
        break;
      case '--compress':
        parsed.compress = true;
        break;
      case '--no-modifiers':
        parsed.noModifiers = true;
        break;
      case '--batch':
        parsed.batch = true;
        break;
      default:
        // Positional arguments: input first, then output.
        if (!parsed.input) {
          parsed.input = token;
        } else if (!parsed.output) {
          parsed.output = token;
        }
    }
  }

  return parsed;
}
/** Print the CLI usage/help text to stdout. */
function printUsage() {
  const usageText = `
Usage: npm run export-blend -- <input.blend> <output.glb> [options]
Options:
--watch Watch the input file and auto-export on changes
--compress Enable Draco mesh compression
--no-modifiers Don't apply modifiers during export
--batch Export all .blend files in input directory
Examples:
npm run export-blend -- public/ship1.blend public/ship1.glb
npm run export-blend -- public/ship1.blend public/ship1.glb --compress
npm run export-blend -- public/ship1.blend public/ship1.glb --watch
npm run export-blend -- public/ public/ --batch
`;
  console.log(usageText);
}
/**
 * CLI entry point: parse arguments, build exporter options, and dispatch
 * to single/watch/batch mode. Exits the process with status 1 on missing
 * arguments or any export failure.
 */
async function main() {
  const args = parseArgs();
  if (!args.input) {
    console.error('Error: Input file or directory required\n');
    printUsage();
    process.exit(1);
  }
  // Build export options
  const options = {
    exportParams: {
      export_format: 'GLB' as const,
      export_draco_mesh_compression_enable: args.compress,
      export_apply_modifiers: !args.noModifiers,
      export_yup: true
    }
  };
  try {
    if (args.batch) {
      // Batch export mode: output directory defaults to the input directory.
      await batchExportMode(args.input, args.output || args.input, options);
    } else if (args.watch) {
      // Watch mode requires an explicit output path.
      if (!args.output) {
        console.error('Error: Output file required for watch mode\n');
        printUsage();
        process.exit(1);
      }
      await watchMode(args.input, args.output, options);
    } else {
      // Single export mode
      if (!args.output) {
        console.error('Error: Output file required\n');
        printUsage();
        process.exit(1);
      }
      await singleExportMode(args.input, args.output, options);
    }
  } catch (error) {
    // Narrow before reading .message; fall back to printing the raw value.
    console.error('Export failed:', error instanceof Error ? error.message : error);
    process.exit(1);
  }
}
/**
 * Export a single .blend file to GLB.
 *
 * @param input   Path to the source .blend file.
 * @param output  Path where the .glb will be written.
 * @param options Export options forwarded to exportBlendToGLB.
 * @throws Error when the exporter reports failure, so main()'s catch
 *         handler can report it and exit with a non-zero status.
 */
async function singleExportMode(input: string, output: string, options: any) {
  console.log(`Exporting ${input} to ${output}...`);
  const result = await exportBlendToGLB(input, output, options);
  if (result.success) {
    console.log(`✅ Successfully exported in ${result.duration}ms`);
    console.log(` Output: ${result.outputPath}`);
  } else {
    // Fix: a failed result was previously ignored silently — the process
    // exited 0 even though no usable output was produced.
    throw new Error(`Export of ${input} failed`);
  }
}
/**
 * Watch a .blend file and re-export it to GLB on every change.
 *
 * Performs one initial export (failures are logged but non-fatal, so the
 * user can fix the file and trigger a re-export), then delegates to
 * watchAndExport. Runs until interrupted with Ctrl+C.
 *
 * @param input   Path to the .blend file to watch.
 * @param output  Path of the .glb to (re)write.
 * @param options Export options forwarded to the exporter.
 */
async function watchMode(input: string, output: string, options: any) {
  console.log(`👀 Watching ${input} for changes...`);
  console.log(` Will export to ${output}`);
  console.log(` Press Ctrl+C to stop\n`);
  // Do initial export
  try {
    await exportBlendToGLB(input, output, options);
    console.log('✅ Initial export complete\n');
  } catch (error) {
    console.error('❌ Initial export failed:', error);
  }
  // Start watching; watchAndExport returns a callback that stops the watcher.
  const stopWatching = watchAndExport(input, output, options);
  // Handle Ctrl+C: stop the watcher, then exit cleanly.
  process.on('SIGINT', () => {
    console.log('\n\nStopping watch mode...');
    stopWatching();
    process.exit(0);
  });
  // Keep process alive with a never-resolving promise; only SIGINT exits.
  await new Promise(() => {});
}
/**
 * Export every .blend file found directly inside inputDir to outputDir.
 *
 * Blender backup files (*.blend1) and non-regular files are skipped.
 * Exports run sequentially via batchExportBlendToGLB; a per-file summary
 * is printed at the end.
 *
 * @param inputDir  Directory scanned (non-recursively) for .blend files.
 * @param outputDir Directory receiving the corresponding .glb files.
 * @param options   Export options forwarded to batchExportBlendToGLB.
 */
async function batchExportMode(inputDir: string, outputDir: string, options: any) {
  console.log(`📦 Batch exporting .blend files from ${inputDir}...`);
  // Find all .blend files in input directory
  const files = readdirSync(inputDir)
    .filter(f => f.endsWith('.blend') && !f.endsWith('.blend1'))
    .filter(f => {
      const fullPath = path.join(inputDir, f);
      return statSync(fullPath).isFile();
    });
  if (files.length === 0) {
    console.log('No .blend files found in directory');
    return;
  }
  console.log(`Found ${files.length} .blend file(s):`);
  files.forEach(f => console.log(` - ${f}`));
  console.log('');
  // Pair each input path with its .glb output path.
  const exports: Array<[string, string]> = files.map(f => {
    const inputPath = path.join(inputDir, f);
    const outputPath = path.join(outputDir, f.replace('.blend', '.glb'));
    return [inputPath, outputPath];
  });
  const results = await batchExportBlendToGLB(exports, options, true); // Sequential
  // Print summary
  console.log('\n📊 Export Summary:');
  const successful = results.filter(r => r.success).length;
  console.log(`✅ Successful: ${successful}/${results.length}`);
  results.forEach((result, i) => {
    const [input] = exports[i];
    const filename = path.basename(input);
    // Fix: the per-file lines contained garbled placeholders and never used
    // the computed `filename`; interpolate it so each file is identified.
    if (result.success) {
      console.log(`✅ ${filename} (${result.duration}ms)`);
    } else {
      console.log(`❌ ${filename} - FAILED`);
    }
  });
}
// Run the script; main() handles its own error reporting and exit codes.
main();

View File

@@ -1,215 +0,0 @@
#!/usr/bin/env node
/**
* Script to generate default level JSON files
* Run with: node scripts/generateDefaultLevels.js
*/
const fs = require('fs');
const path = require('path');
// Helper function to generate random asteroid data
function generateAsteroid(id, config, shipPos = [0, 1, 0]) {
const { distanceMin, distanceMax, rockSizeMin, rockSizeMax, forceMultiplier } = config;
// Random spherical distribution
const theta = Math.random() * Math.PI * 2; // Azimuth angle
const phi = Math.acos(2 * Math.random() - 1); // Polar angle
const distance = distanceMin + Math.random() * (distanceMax - distanceMin);
const position = [
shipPos[0] + distance * Math.sin(phi) * Math.cos(theta),
shipPos[1] + distance * Math.sin(phi) * Math.sin(theta),
shipPos[2] + distance * Math.cos(phi)
];
const scale = rockSizeMin + Math.random() * (rockSizeMax - rockSizeMin);
// Random velocity toward ship
const speedMin = 15 * forceMultiplier;
const speedMax = 30 * forceMultiplier;
const speed = speedMin + Math.random() * (speedMax - speedMin);
const dirToShip = [
shipPos[0] - position[0],
shipPos[1] - position[1],
shipPos[2] - position[2]
];
const length = Math.sqrt(dirToShip[0]**2 + dirToShip[1]**2 + dirToShip[2]**2);
const normalized = dirToShip.map(v => v / length);
const linearVelocity = normalized.map(v => v * speed);
const angularVelocity = [
(Math.random() - 0.5) * 2,
(Math.random() - 0.5) * 2,
(Math.random() - 0.5) * 2
];
return {
id: `asteroid-${id}`,
position,
scale,
linearVelocity,
angularVelocity
};
}
// Level configurations matching LevelGenerator difficulty configs.
// Each entry produces one JSON file in public/levels/. Note that the three
// commander-tier levels share an identical difficultyConfig and differ only
// in filename/metadata (the random asteroid placement makes each unique).
const levels = [
  {
    filename: 'rookie-training.json',
    difficulty: 'recruit',
    difficultyConfig: {
      rockCount: 5,
      forceMultiplier: 0.8,
      rockSizeMin: 10,
      rockSizeMax: 15,
      distanceMin: 220,
      distanceMax: 250
    },
    metadata: {
      author: 'System',
      description: 'Learn the basics of ship control and asteroid destruction in a calm sector of space.',
      estimatedTime: '3-5 minutes',
      type: 'default'
    }
  },
  {
    filename: 'rescue-mission.json',
    difficulty: 'pilot',
    difficultyConfig: {
      rockCount: 10,
      forceMultiplier: 1.0,
      rockSizeMin: 8,
      rockSizeMax: 20,
      distanceMin: 225,
      distanceMax: 300
    },
    metadata: {
      author: 'System',
      description: 'Clear a path through moderate asteroid density to reach the stranded station.',
      estimatedTime: '5-8 minutes',
      type: 'default'
    }
  },
  {
    filename: 'deep-space-patrol.json',
    difficulty: 'captain',
    difficultyConfig: {
      rockCount: 20,
      forceMultiplier: 1.2,
      rockSizeMin: 5,
      rockSizeMax: 40,
      distanceMin: 230,
      distanceMax: 450
    },
    metadata: {
      author: 'System',
      description: 'Patrol a dangerous sector with heavy asteroid activity. Watch your fuel!',
      estimatedTime: '8-12 minutes',
      type: 'default'
    }
  },
  {
    filename: 'enemy-territory.json',
    difficulty: 'commander',
    difficultyConfig: {
      rockCount: 50,
      forceMultiplier: 1.3,
      rockSizeMin: 2,
      rockSizeMax: 8,
      distanceMin: 90,
      distanceMax: 280
    },
    metadata: {
      author: 'System',
      description: 'Navigate through hostile space with high-speed asteroids and limited resources.',
      estimatedTime: '10-15 minutes',
      type: 'default'
    }
  },
  {
    filename: 'the-gauntlet.json',
    difficulty: 'commander',
    difficultyConfig: {
      rockCount: 50,
      forceMultiplier: 1.3,
      rockSizeMin: 2,
      rockSizeMax: 8,
      distanceMin: 90,
      distanceMax: 280
    },
    metadata: {
      author: 'System',
      description: 'Face maximum asteroid density in this ultimate test of piloting skill.',
      estimatedTime: '12-18 minutes',
      type: 'default'
    }
  },
  {
    filename: 'final-challenge.json',
    difficulty: 'commander',
    difficultyConfig: {
      rockCount: 50,
      forceMultiplier: 1.3,
      rockSizeMin: 2,
      rockSizeMax: 8,
      distanceMin: 90,
      distanceMax: 280
    },
    metadata: {
      author: 'System',
      description: 'The ultimate challenge - survive the most chaotic asteroid field in known space.',
      estimatedTime: '15-20 minutes',
      type: 'default'
    }
  }
];
// Output directory for the generated level JSON files.
const outputDir = path.join(__dirname, '../public/levels');
// Ensure directory exists before writing.
if (!fs.existsSync(outputDir)) {
  fs.mkdirSync(outputDir, { recursive: true });
}
// Generate each level: random asteroids per the level's difficulty config,
// wrapped in the level JSON schema and written to public/levels/<filename>.
for (const level of levels) {
  const asteroids = [];
  for (let i = 0; i < level.difficultyConfig.rockCount; i++) {
    asteroids.push(generateAsteroid(i, level.difficultyConfig));
  }
  const levelConfig = {
    version: '1.0',
    difficulty: level.difficulty,
    timestamp: new Date().toISOString(),
    metadata: level.metadata,
    // Ship starts at rest just above the origin, matching generateAsteroid's
    // default target position [0, 1, 0].
    ship: {
      position: [0, 1, 0],
      rotation: [0, 0, 0],
      linearVelocity: [0, 0, 0],
      angularVelocity: [0, 0, 0]
    },
    startBase: {
      position: [0, 0, 0],
      baseGlbPath: 'base.glb'
    },
    sun: {
      position: [0, 0, 400],
      diameter: 50,
      intensity: 1000000
    },
    planets: [],
    asteroids,
    difficultyConfig: level.difficultyConfig
  };
  const outputPath = path.join(outputDir, level.filename);
  // Pretty-printed JSON (2-space indent) so the files are diff-friendly.
  fs.writeFileSync(outputPath, JSON.stringify(levelConfig, null, 2));
  console.log(`Generated: ${level.filename} (${level.difficultyConfig.rockCount} asteroids)`);
}
console.log(`\nSuccessfully generated ${levels.length} default level files!`);

View File

@@ -1,207 +0,0 @@
/**
* Admin management script for Supabase
*
* Usage:
* npm run admin:add -- --user-id="facebook|123" --name="John" --email="john@example.com"
* npm run admin:add -- --user-id="facebook|123" --super # Add as super admin (all permissions)
* npm run admin:list # List all admins
* npm run admin:remove -- --user-id="facebook|123" # Remove admin
*
* Required .env variables:
* SUPABASE_DB_URL - Direct DB connection string
*/
import postgres from 'postgres';
import * as path from 'path';
import { fileURLToPath } from 'url';
import * as dotenv from 'dotenv';
// ES module equivalent of __dirname
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// Load environment variables from .env
dotenv.config();
// Direct Postgres connection string (Supabase → Settings → Database URI).
const DATABASE_URL = process.env.SUPABASE_DB_URL;
if (!DATABASE_URL) {
  console.error('Missing SUPABASE_DB_URL environment variable.');
  process.exit(1);
}
// Single shared connection pool used by every subcommand in this script.
const sql = postgres(DATABASE_URL);
// CLI arguments: the first positional token is the subcommand
// ('add' | 'list' | 'remove'); the rest are --name=value / --flag tokens.
const args = process.argv.slice(2);
const command = args[0]; // 'add', 'list', 'remove'

/**
 * Read the value of a `--name=value` style argument.
 * @returns the value after the first '=', or null when the flag is absent.
 */
function getArg(name: string): string | null {
  const prefix = `--${name}=`;
  for (const candidate of args) {
    if (candidate.startsWith(prefix)) {
      return candidate.split('=')[1];
    }
  }
  return null;
}

/** True when a bare `--name` flag is present on the command line. */
function hasFlag(name: string): boolean {
  return args.some(candidate => candidate === `--${name}`);
}
/**
 * Insert or update an admin row for the given --user-id.
 *
 * Every admin gets can_review_levels; --super additionally grants
 * can_manage_admins, can_manage_official and can_view_analytics. On
 * conflict the existing row is updated and permissions are only ever
 * widened (OR-ed with existing), never revoked.
 */
async function addAdmin() {
  const userId = getArg('user-id');
  const displayName = getArg('name') || null;
  const email = getArg('email') || null;
  const isSuper = hasFlag('super');
  if (!userId) {
    console.error('Missing required --user-id argument');
    console.error('Usage: npm run admin:add -- --user-id="facebook|123" --name="John" [--super]');
    process.exit(1);
  }
  console.log(`\nAdding admin: ${userId}`);
  if (isSuper) {
    console.log(' Type: Super Admin (all permissions)');
  }
  try {
    // Parameterized upsert; the postgres tagged template escapes all values.
    const result = await sql`
      INSERT INTO admins (
        user_id,
        display_name,
        email,
        can_review_levels,
        can_manage_admins,
        can_manage_official,
        can_view_analytics,
        is_active
      ) VALUES (
        ${userId},
        ${displayName},
        ${email},
        true,
        ${isSuper},
        ${isSuper},
        ${isSuper},
        true
      )
      ON CONFLICT (user_id) DO UPDATE SET
        display_name = COALESCE(EXCLUDED.display_name, admins.display_name),
        email = COALESCE(EXCLUDED.email, admins.email),
        can_review_levels = true,
        can_manage_admins = ${isSuper} OR admins.can_manage_admins,
        can_manage_official = ${isSuper} OR admins.can_manage_official,
        can_view_analytics = ${isSuper} OR admins.can_view_analytics,
        is_active = true
      RETURNING *
    `;
    // Echo the effective permissions from the returned row.
    console.log('\n✓ Admin added/updated successfully!');
    console.log('\nPermissions:');
    console.log(` can_review_levels: ${result[0].can_review_levels}`);
    console.log(` can_manage_admins: ${result[0].can_manage_admins}`);
    console.log(` can_manage_official: ${result[0].can_manage_official}`);
    console.log(` can_view_analytics: ${result[0].can_view_analytics}`);
  } catch (error: any) {
    console.error('Failed to add admin:', error.message);
    process.exit(1);
  }
}
/**
 * Print every row of the admins table (oldest first) with its active
 * status and a compact, comma-separated permission summary.
 */
async function listAdmins() {
  console.log('\nCurrent Admins:\n');
  const admins = await sql`
    SELECT
      user_id,
      display_name,
      email,
      can_review_levels,
      can_manage_admins,
      can_manage_official,
      can_view_analytics,
      is_active,
      expires_at,
      created_at
    FROM admins
    ORDER BY created_at
  `;
  if (admins.length === 0) {
    console.log(' No admins found.');
    return;
  }
  for (const admin of admins) {
    const status = admin.is_active ? '✓ active' : '✗ inactive';
    // Collapse the boolean permission columns into a readable list.
    const perms = [
      admin.can_review_levels ? 'review' : null,
      admin.can_manage_admins ? 'manage_admins' : null,
      admin.can_manage_official ? 'manage_official' : null,
      admin.can_view_analytics ? 'analytics' : null,
    ].filter(Boolean).join(', ');
    console.log(` ${admin.user_id}`);
    console.log(` Name: ${admin.display_name || '(not set)'}`);
    console.log(` Email: ${admin.email || '(not set)'}`);
    console.log(` Status: ${status}`);
    console.log(` Permissions: ${perms || 'none'}`);
    // Only show expiry when one is set.
    if (admin.expires_at) {
      console.log(` Expires: ${admin.expires_at}`);
    }
    console.log('');
  }
  console.log(`Total: ${admins.length} admin(s)`);
}
/**
 * Delete the admin row matching --user-id.
 * Prints whether a row was actually removed.
 */
async function removeAdmin() {
  const userId = getArg('user-id');
  if (!userId) {
    console.error('Missing required --user-id argument');
    console.error('Usage: npm run admin:remove -- --user-id="facebook|123"');
    process.exit(1);
  }
  console.log(`\nRemoving admin: ${userId}`);
  // RETURNING lets us distinguish "not found" from a successful delete.
  const result = await sql`
    DELETE FROM admins WHERE user_id = ${userId} RETURNING user_id
  `;
  if (result.length === 0) {
    console.log(' Admin not found.');
  } else {
    console.log('✓ Admin removed successfully!');
  }
}
/**
 * Dispatch the requested subcommand and always close the connection pool.
 * An unknown or missing command prints usage instead of failing.
 */
async function main() {
  const handlers: Record<string, () => Promise<void>> = {
    add: addAdmin,
    list: listAdmins,
    remove: removeAdmin,
  };
  try {
    const handler = handlers[command];
    if (handler) {
      await handler();
    } else {
      console.log('Admin Management Script\n');
      console.log('Commands:');
      console.log(' npm run admin:add -- --user-id="id" [--name="Name"] [--email="email"] [--super]');
      console.log(' npm run admin:list');
      console.log(' npm run admin:remove -- --user-id="id"');
    }
  } finally {
    await sql.end();
  }
}
// Top-level failure handler: report, best-effort pool close, exit non-zero.
// NOTE(review): sql.end() returns a promise that is not awaited, so
// process.exit(1) may pre-empt the pool shutdown — acceptable for a CLI.
main().catch((error) => {
  console.error('Error:', error.message);
  sql.end();
  process.exit(1);
});

View File

@@ -1,182 +0,0 @@
/**
* Migration runner for Supabase database
*
* Usage:
* npm run migrate # Run all pending migrations
* npm run migrate -- --file=001_cloud_levels.sql # Run specific migration
* npm run migrate -- --status # Show migration status
*
* Required .env variables:
* SUPABASE_DB_URL - Direct DB connection string (Settings Database URI)
*/
import postgres from 'postgres';
import * as fs from 'fs';
import * as path from 'path';
import { fileURLToPath } from 'url';
import * as dotenv from 'dotenv';
// ES module equivalent of __dirname
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// Load environment variables from .env
dotenv.config();
const DATABASE_URL = process.env.SUPABASE_DB_URL;
if (!DATABASE_URL) {
  console.error('Missing SUPABASE_DB_URL environment variable.');
  console.error('Get it from Supabase → Settings → Database → Connection string (URI)');
  console.error('Use the "Session pooler" connection string for IPv4 compatibility.');
  process.exit(1);
}
// Shared connection pool; closed in main()'s finally block.
const sql = postgres(DATABASE_URL);
// Migration .sql files live in supabase/migrations at the repo root.
const MIGRATIONS_DIR = path.join(__dirname, '..', 'supabase', 'migrations');
/**
 * Ensure the `_migrations` bookkeeping table exists.
 * Each successfully executed migration gets a row keyed by unique file
 * name, which is how runAllMigrations() knows what to skip.
 */
async function ensureMigrationsTable(): Promise<void> {
  await sql`
    CREATE TABLE IF NOT EXISTS _migrations (
      id SERIAL PRIMARY KEY,
      name TEXT UNIQUE NOT NULL,
      executed_at TIMESTAMPTZ DEFAULT NOW()
    )
  `;
}
/**
 * Fetch the names of all migrations already recorded in `_migrations`,
 * in the order they were executed.
 */
async function getExecutedMigrations(): Promise<string[]> {
  const rows = await sql`SELECT name FROM _migrations ORDER BY id`;
  const names: string[] = [];
  for (const row of rows) {
    names.push(row.name);
  }
  return names;
}
/**
 * List the .sql migration files in MIGRATIONS_DIR, sorted by name.
 * Exits the process when the directory does not exist.
 */
function getMigrationFiles(): string[] {
  if (!fs.existsSync(MIGRATIONS_DIR)) {
    console.error(`Migrations directory not found: ${MIGRATIONS_DIR}`);
    process.exit(1);
  }
  const entries = fs.readdirSync(MIGRATIONS_DIR);
  const sqlFiles = entries.filter(entry => entry.endsWith('.sql'));
  sqlFiles.sort();
  return sqlFiles;
}
/**
 * Execute a single migration file and record it in `_migrations`.
 *
 * @param filename Name of the .sql file inside MIGRATIONS_DIR.
 * @throws Error when the file is missing or the SQL fails; a failed
 *         migration is NOT recorded, so it will be retried next run.
 */
async function runMigration(filename: string): Promise<void> {
  const filepath = path.join(MIGRATIONS_DIR, filename);
  if (!fs.existsSync(filepath)) {
    throw new Error(`Migration file not found: ${filepath}`);
  }
  const content = fs.readFileSync(filepath, 'utf-8');
  console.log(` Running: ${filename}...`);
  try {
    // Execute the migration (raw SQL, may contain multiple statements).
    await sql.unsafe(content);
    // Record the migration — parameterized, the driver escapes the value.
    // Fix: the log lines and this VALUES clause contained garbled
    // `$(unknown)` placeholders; restored to interpolate the filename.
    await sql`INSERT INTO _migrations (name) VALUES (${filename})`;
    console.log(`✓ ${filename} completed`);
  } catch (error) {
    // Narrow the unknown catch variable before reading .message.
    const message = error instanceof Error ? error.message : String(error);
    console.error(`✗ ${filename} failed:`, message);
    throw error;
  }
}
/**
 * Run every migration that is present on disk but not yet recorded in
 * `_migrations`, in sorted filename order.
 */
async function runAllMigrations(): Promise<void> {
  await ensureMigrationsTable();
  const executed = await getExecutedMigrations();
  const files = getMigrationFiles();
  // Pending = on disk but not yet recorded as executed.
  const pending = files.filter(f => !executed.includes(f));
  if (pending.length === 0) {
    console.log('No pending migrations.');
    return;
  }
  console.log(`\nRunning ${pending.length} migration(s):\n`);
  // Run sequentially — presumably later migrations depend on earlier ones.
  for (const file of pending) {
    await runMigration(file);
  }
  console.log('\n✓ All migrations completed successfully!');
}
/**
 * Print a table of all migration files with their done/pending status
 * and a summary count line.
 */
async function showStatus(): Promise<void> {
  await ensureMigrationsTable();
  const executed = await getExecutedMigrations();
  const files = getMigrationFiles();
  console.log('\nMigration Status:\n');
  console.log(' File Status');
  console.log(' -------------------------------- --------');
  for (const file of files) {
    const status = executed.includes(file) ? '✓ done' : '○ pending';
    // padEnd keeps the status column aligned for typical filename lengths.
    console.log(` ${file.padEnd(34)} ${status}`);
  }
  const pending = files.filter(f => !executed.includes(f));
  console.log(`\n Total: ${files.length} | Done: ${executed.length} | Pending: ${pending.length}\n`);
}
// Parse command line args
const args = process.argv.slice(2);
// --status: show the migration table instead of running anything.
const showStatusFlag = args.includes('--status');
// --file=<name>: run just that one migration.
const fileArg = args.find(arg => arg.startsWith('--file='));
const specificFile = fileArg ? fileArg.split('=')[1] : null;
/**
 * Entry point: dispatch on CLI flags (--status / --file=<name> / default
 * run-all), always closing the connection pool afterwards.
 */
async function main() {
  try {
    if (showStatusFlag) {
      await showStatus();
    } else if (specificFile) {
      await ensureMigrationsTable();
      const executed = await getExecutedMigrations();
      // Refuse to re-run a recorded migration to keep runs idempotent.
      if (executed.includes(specificFile)) {
        console.log(`Migration ${specificFile} has already been executed.`);
        console.log('To re-run, manually delete it from _migrations table first.');
      } else {
        console.log(`\nRunning specific migration:\n`);
        await runMigration(specificFile);
        console.log('\n✓ Migration completed!');
      }
    } else {
      await runAllMigrations();
    }
  } finally {
    // Close the postgres pool so the process can exit cleanly.
    await sql.end();
  }
}
// Top-level failure handler: report and exit non-zero.
// NOTE(review): sql.end() is fire-and-forget here; process.exit(1) may
// pre-empt the pool shutdown, which is acceptable for a one-shot CLI.
main().catch((error) => {
  console.error('\nMigration failed:', error.message);
  sql.end();
  process.exit(1);
});

View File

@@ -1,242 +0,0 @@
/**
* Seed script for populating official levels from JSON files
*
* Usage:
* npm run seed:levels # Seed all levels from directory.json
* npm run seed:levels -- --clean # Delete all official levels first
* npm run seed:levels -- --admin-id="facebook|123" # Specify admin user ID
*
* Required .env variables:
* SUPABASE_DB_URL - Direct DB connection string
*
* Note: Requires an admin user with can_manage_official permission.
* The script will use the first super admin found, or you can specify --admin-id.
*/
import postgres from 'postgres';
import * as fs from 'fs';
import * as path from 'path';
import { fileURLToPath } from 'url';
import * as dotenv from 'dotenv';
// ES module equivalent of __dirname
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// Load environment variables from .env
dotenv.config();
const DATABASE_URL = process.env.SUPABASE_DB_URL;
if (!DATABASE_URL) {
  console.error('Missing SUPABASE_DB_URL environment variable.');
  process.exit(1);
}
// Shared connection pool for the whole seeding run.
const sql = postgres(DATABASE_URL);
// Level config JSONs live in public/levels; directory.json is the manifest.
const LEVELS_DIR = path.join(__dirname, '..', 'public', 'levels');
const DIRECTORY_FILE = path.join(LEVELS_DIR, 'directory.json');
/** One level entry from public/levels/directory.json. */
interface DirectoryEntry {
  /** Unique slug; used as the upsert conflict key in the levels table. */
  id: string;
  name: string;
  description: string;
  version: string;
  /** Path of the level's config JSON, relative to LEVELS_DIR. */
  levelPath: string;
  difficulty: string;
  estimatedTime: string;
  missionBrief: string[];
  unlockRequirements: string[];
  tags: string[];
  /** Whether the level starts locked (stored as default_locked). */
  defaultLocked: boolean;
}
/** Shape of directory.json: a manifest version plus the level entries. */
interface Directory {
  version: string;
  levels: DirectoryEntry[];
}
// Parse command line args
const args = process.argv.slice(2);
// --clean: delete all existing official levels before seeding.
const cleanFirst = args.includes('--clean');
// --admin-id=<external id>: pick a specific admin as the levels' owner.
const adminIdArg = args.find(a => a.startsWith('--admin-id='));
const specifiedAdminId = adminIdArg ? adminIdArg.split('=')[1] : null;
/**
 * Resolve the internal (UUID) user id of an admin allowed to manage
 * official levels.
 *
 * When --admin-id is given, that admin is validated (active, has
 * can_manage_official, not expired); otherwise any qualifying admin with
 * an internal user id is picked.
 *
 * @throws Error when no suitable admin exists or the specified admin has
 *         no internal user id yet.
 */
async function getAdminInternalUserId(): Promise<string> {
  if (specifiedAdminId) {
    // Verify the specified admin exists and has permission
    const admin = await sql`
      SELECT internal_user_id FROM admins
      WHERE user_id = ${specifiedAdminId}
      AND is_active = true
      AND can_manage_official = true
      AND (expires_at IS NULL OR expires_at > NOW())
    `;
    if (admin.length === 0) {
      throw new Error(`Admin ${specifiedAdminId} not found or lacks manage_official permission`);
    }
    if (!admin[0].internal_user_id) {
      throw new Error(`Admin ${specifiedAdminId} has no internal user ID. Run migration 002 first.`);
    }
    return admin[0].internal_user_id;
  }
  // Find any admin with manage_official permission
  const admins = await sql`
    SELECT internal_user_id FROM admins
    WHERE is_active = true
    AND can_manage_official = true
    AND internal_user_id IS NOT NULL
    AND (expires_at IS NULL OR expires_at > NOW())
    LIMIT 1
  `;
  if (admins.length === 0) {
    throw new Error('No admin found with manage_official permission and internal user ID. Run admin:add first.');
  }
  return admins[0].internal_user_id;
}
/**
 * Delete every row in `levels` marked level_type = 'official'.
 * User-created levels are untouched.
 */
async function cleanOfficialLevels(): Promise<void> {
  console.log('\nDeleting existing official levels...');
  // RETURNING id lets us report how many rows were removed.
  const result = await sql`
    DELETE FROM levels WHERE level_type = 'official' RETURNING id
  `;
  console.log(` Deleted ${result.length} official level(s)`);
}
/**
 * Seed every level listed in directory.json into the `levels` table.
 *
 * Each entry is upserted by slug: new rows are inserted, existing rows are
 * refreshed in place. Entries whose config file is missing (or whose SQL
 * fails) are counted as failures but do not stop the run. Prints a
 * per-level progress line and a final summary.
 */
async function seedLevels(): Promise<void> {
  // Read directory.json
  if (!fs.existsSync(DIRECTORY_FILE)) {
    throw new Error(`Directory file not found: ${DIRECTORY_FILE}`);
  }
  const directory: Directory = JSON.parse(fs.readFileSync(DIRECTORY_FILE, 'utf-8'));
  console.log(`\nFound ${directory.levels.length} levels in directory.json (v${directory.version})`);
  // Get admin's internal user ID (UUID) — becomes the owner of every row.
  const adminUserId = await getAdminInternalUserId();
  console.log(`Using admin internal ID: ${adminUserId}\n`);
  let inserted = 0;
  let updated = 0;
  let failed = 0;
  for (let i = 0; i < directory.levels.length; i++) {
    const entry = directory.levels[i];
    const levelPath = path.join(LEVELS_DIR, entry.levelPath);
    process.stdout.write(` [${i + 1}/${directory.levels.length}] ${entry.name}... `);
    // Check if level config file exists
    if (!fs.existsSync(levelPath)) {
      console.log('✗ config file not found');
      failed++;
      continue;
    }
    try {
      // Read level config
      const config = JSON.parse(fs.readFileSync(levelPath, 'utf-8'));
      // Upsert the level; the directory index `i` doubles as sort_order.
      const result = await sql`
        INSERT INTO levels (
          user_id,
          slug,
          name,
          description,
          difficulty,
          estimated_time,
          tags,
          config,
          mission_brief,
          level_type,
          sort_order,
          unlock_requirements,
          default_locked
        ) VALUES (
          ${adminUserId},
          ${entry.id},
          ${entry.name},
          ${entry.description},
          ${entry.difficulty},
          ${entry.estimatedTime},
          ${entry.tags},
          ${JSON.stringify(config)},
          ${entry.missionBrief},
          'official',
          ${i},
          ${entry.unlockRequirements},
          ${entry.defaultLocked}
        )
        ON CONFLICT (slug) DO UPDATE SET
          name = EXCLUDED.name,
          description = EXCLUDED.description,
          difficulty = EXCLUDED.difficulty,
          estimated_time = EXCLUDED.estimated_time,
          tags = EXCLUDED.tags,
          config = EXCLUDED.config,
          mission_brief = EXCLUDED.mission_brief,
          sort_order = EXCLUDED.sort_order,
          unlock_requirements = EXCLUDED.unlock_requirements,
          default_locked = EXCLUDED.default_locked,
          updated_at = NOW()
        RETURNING (xmax = 0) as is_insert
      `;
      // is_insert (xmax = 0) distinguishes a fresh insert from an update.
      if (result[0].is_insert) {
        console.log('✓ inserted');
        inserted++;
      } else {
        console.log('✓ updated');
        updated++;
      }
    } catch (error: any) {
      // NOTE(review): this log prints only the message — it looks like a
      // failure marker (e.g. '✗ ') was lost during extraction; confirm
      // against the original file before relying on the output format.
      console.log(`${error.message}`);
      failed++;
    }
  }
  // Final summary of the whole run.
  console.log('\n----------------------------------------');
  console.log(`Inserted: ${inserted}`);
  console.log(`Updated: ${updated}`);
  console.log(`Failed: ${failed}`);
  console.log(`Total: ${directory.levels.length}`);
  if (failed === 0) {
    console.log('\n✓ All levels seeded successfully!');
  } else {
    console.log('\n⚠ Some levels failed to seed');
  }
}
/**
 * Entry point: optional --clean purge of official levels, then seed from
 * directory.json. The connection pool is closed whether seeding succeeds
 * or throws.
 */
async function main(): Promise<void> {
  try {
    if (cleanFirst) await cleanOfficialLevels();
    await seedLevels();
  } finally {
    await sql.end();
  }
}
// Top-level failure handler: report and exit non-zero.
// NOTE(review): sql.end() is not awaited; process.exit(1) may pre-empt the
// pool shutdown — acceptable for a one-shot CLI.
main().catch((error) => {
  console.error('\nSeeding failed:', error.message);
  sql.end();
  process.exit(1);
});