Better dev experience, better front page

2026-01-20 21:20:13 +01:00
parent de9406a583
commit 4df99a4312
16 changed files with 1419 additions and 197 deletions

dev/.gitignore vendored Normal file

@@ -0,0 +1,3 @@
node_modules
data/db
data

dev/README.md Normal file

@@ -0,0 +1,40 @@
# BuildPath Development Database Setup
This directory contains scripts and tools for setting up a development MongoDB instance with realistic data for frontend testing.
## 📁 Structure
```
dev/
├── data/                   # Data files (patches.json, match files, db data)
├── scripts/                # Setup and import scripts
│   ├── setup-db.js         # Main setup script
│   └── process-matches.js  # Stream-based match importer
└── README.md               # This file
```
## Quick Start
```bash
node dev/scripts/setup-db.js
```
This will:
1. Download a production snapshot with realistic data
2. Import patches data
3. Import matches using stream processing (optimized for large files)
4. Generate champion statistics
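Both scripts read the connection string from the `MONGO_URI` environment variable and fall back to the local Docker credentials, so the setup can be pointed at another MongoDB instance if needed:

```bash
# Optional override — shown here with the default local URI
MONGO_URI="mongodb://root:password@localhost:27017/buildpath?authSource=admin" \
  node dev/scripts/setup-db.js
```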
## Individual Commands
### Generate Stats Only
```bash
node dev/scripts/setup-db.js generate-stats
```
### Check Database Status
```bash
node dev/scripts/setup-db.js status
```

dev/docker-compose.yml Normal file

@@ -0,0 +1,32 @@
services:
  # Development MongoDB with performance optimizations
  mongodb:
    image: mongo:latest
    container_name: buildpath-mongodb
    ports:
      - "27017:27017"
    environment:
      MONGO_INITDB_ROOT_USERNAME: ${MONGO_USER:-root}
      MONGO_INITDB_ROOT_PASSWORD: ${MONGO_PASS:-password}
    volumes:
      - ./data/db:/data/db
    command: mongod --wiredTigerCacheSizeGB 4 --quiet
    healthcheck:
      test: echo 'db.runCommand("ping").ok' | mongosh localhost:27017/test --quiet
      interval: 5s
      timeout: 2s
      retries: 30

  mongo-express:
    image: mongo-express
    ports:
      - "8081:8081"
    environment:
      ME_CONFIG_MONGODB_SERVER: mongodb
      ME_CONFIG_MONGODB_ADMINUSERNAME: ${MONGO_USER:-root}
      ME_CONFIG_MONGODB_ADMINPASSWORD: ${MONGO_PASS:-password}
      ME_CONFIG_BASICAUTH_USERNAME: admin
      ME_CONFIG_BASICAUTH_PASSWORD: admin123
    depends_on:
      mongodb:
        condition: service_healthy
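The stack can also be brought up without the setup script; with the defaults above, a quick manual session looks like this (the `mongosh` step assumes it is installed locally):

```bash
cd dev
docker compose up -d    # MongoDB on :27017, Mongo Express on :8081
mongosh "mongodb://root:password@localhost:27017/?authSource=admin"
```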

dev/package-lock.json generated Normal file

@@ -0,0 +1,168 @@
{
  "name": "buildpath-dev-tools",
  "version": "1.0.0",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "": {
      "name": "buildpath-dev-tools",
      "version": "1.0.0",
      "dependencies": {
        "mongodb": "^6.10.0"
      },
      "devDependencies": {
        "@types/node": "^22.10.1"
      }
    },
    "node_modules/@mongodb-js/saslprep": {
      "version": "1.4.5",
      "resolved": "https://registry.npmjs.org/@mongodb-js/saslprep/-/saslprep-1.4.5.tgz",
      "integrity": "sha512-k64Lbyb7ycCSXHSLzxVdb2xsKGPMvYZfCICXvDsI8Z65CeWQzTEKS4YmGbnqw+U9RBvLPTsB6UCmwkgsDTGWIw==",
      "dependencies": {
        "sparse-bitfield": "^3.0.3"
      }
    },
    "node_modules/@types/node": {
      "version": "22.19.7",
      "resolved": "https://registry.npmjs.org/@types/node/-/node-22.19.7.tgz",
      "integrity": "sha512-MciR4AKGHWl7xwxkBa6xUGxQJ4VBOmPTF7sL+iGzuahOFaO0jHCsuEfS80pan1ef4gWId1oWOweIhrDEYLuaOw==",
      "dev": true,
      "dependencies": {
        "undici-types": "~6.21.0"
      }
    },
    "node_modules/@types/webidl-conversions": {
      "version": "7.0.3",
      "resolved": "https://registry.npmjs.org/@types/webidl-conversions/-/webidl-conversions-7.0.3.tgz",
      "integrity": "sha512-CiJJvcRtIgzadHCYXw7dqEnMNRjhGZlYK05Mj9OyktqV8uVT8fD2BFOB7S1uwBE3Kj2Z+4UyPmFw/Ixgw/LAlA=="
    },
    "node_modules/@types/whatwg-url": {
      "version": "11.0.5",
      "resolved": "https://registry.npmjs.org/@types/whatwg-url/-/whatwg-url-11.0.5.tgz",
      "integrity": "sha512-coYR071JRaHa+xoEvvYqvnIHaVqaYrLPbsufM9BF63HkwI5Lgmy2QR8Q5K/lYDYo5AK82wOvSOS0UsLTpTG7uQ==",
      "dependencies": {
        "@types/webidl-conversions": "*"
      }
    },
    "node_modules/bson": {
      "version": "6.10.4",
      "resolved": "https://registry.npmjs.org/bson/-/bson-6.10.4.tgz",
      "integrity": "sha512-WIsKqkSC0ABoBJuT1LEX+2HEvNmNKKgnTAyd0fL8qzK4SH2i9NXg+t08YtdZp/V9IZ33cxe3iV4yM0qg8lMQng==",
      "engines": {
        "node": ">=16.20.1"
      }
    },
    "node_modules/memory-pager": {
      "version": "1.5.0",
      "resolved": "https://registry.npmjs.org/memory-pager/-/memory-pager-1.5.0.tgz",
      "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg=="
    },
    "node_modules/mongodb": {
      "version": "6.21.0",
      "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-6.21.0.tgz",
      "integrity": "sha512-URyb/VXMjJ4da46OeSXg+puO39XH9DeQpWCslifrRn9JWugy0D+DvvBvkm2WxmHe61O/H19JM66p1z7RHVkZ6A==",
      "dependencies": {
        "@mongodb-js/saslprep": "^1.3.0",
        "bson": "^6.10.4",
        "mongodb-connection-string-url": "^3.0.2"
      },
      "engines": {
        "node": ">=16.20.1"
      },
      "peerDependencies": {
        "@aws-sdk/credential-providers": "^3.188.0",
        "@mongodb-js/zstd": "^1.1.0 || ^2.0.0",
        "gcp-metadata": "^5.2.0",
        "kerberos": "^2.0.1",
        "mongodb-client-encryption": ">=6.0.0 <7",
        "snappy": "^7.3.2",
        "socks": "^2.7.1"
      },
      "peerDependenciesMeta": {
        "@aws-sdk/credential-providers": {
          "optional": true
        },
        "@mongodb-js/zstd": {
          "optional": true
        },
        "gcp-metadata": {
          "optional": true
        },
        "kerberos": {
          "optional": true
        },
        "mongodb-client-encryption": {
          "optional": true
        },
        "snappy": {
          "optional": true
        },
        "socks": {
          "optional": true
        }
      }
    },
    "node_modules/mongodb-connection-string-url": {
      "version": "3.0.2",
      "resolved": "https://registry.npmjs.org/mongodb-connection-string-url/-/mongodb-connection-string-url-3.0.2.tgz",
      "integrity": "sha512-rMO7CGo/9BFwyZABcKAWL8UJwH/Kc2x0g72uhDWzG48URRax5TCIcJ7Rc3RZqffZzO/Gwff/jyKwCU9TN8gehA==",
      "dependencies": {
        "@types/whatwg-url": "^11.0.2",
        "whatwg-url": "^14.1.0 || ^13.0.0"
      }
    },
    "node_modules/punycode": {
      "version": "2.3.1",
      "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz",
      "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==",
      "engines": {
        "node": ">=6"
      }
    },
    "node_modules/sparse-bitfield": {
      "version": "3.0.3",
      "resolved": "https://registry.npmjs.org/sparse-bitfield/-/sparse-bitfield-3.0.3.tgz",
      "integrity": "sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==",
      "dependencies": {
        "memory-pager": "^1.0.2"
      }
    },
    "node_modules/tr46": {
      "version": "5.1.1",
      "resolved": "https://registry.npmjs.org/tr46/-/tr46-5.1.1.tgz",
      "integrity": "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==",
      "dependencies": {
        "punycode": "^2.3.1"
      },
      "engines": {
        "node": ">=18"
      }
    },
    "node_modules/undici-types": {
      "version": "6.21.0",
      "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz",
      "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==",
      "dev": true
    },
    "node_modules/webidl-conversions": {
      "version": "7.0.0",
      "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz",
      "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==",
      "engines": {
        "node": ">=12"
      }
    },
    "node_modules/whatwg-url": {
      "version": "14.2.0",
      "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.2.0.tgz",
      "integrity": "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==",
      "dependencies": {
        "tr46": "^5.1.0",
        "webidl-conversions": "^7.0.0"
      },
      "engines": {
        "node": ">=18"
      }
    }
  }
}

dev/package.json Normal file

@@ -0,0 +1,19 @@
{
  "name": "buildpath-dev-tools",
  "version": "1.0.0",
  "description": "Development tools for BuildPath database setup",
  "main": "scripts/setup-db.js",
  "scripts": {
    "setup": "node scripts/setup-db.js",
    "import-matches": "node scripts/setup-db.js import-matches",
    "import-patches": "node scripts/setup-db.js import-patches",
    "generate-stats": "node scripts/setup-db.js generate-stats",
    "status": "node scripts/setup-db.js status"
  },
  "dependencies": {
    "mongodb": "^6.10.0"
  },
  "devDependencies": {
    "@types/node": "^22.10.1"
  }
}
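The `scripts` entries mirror the CLI commands of `setup-db.js`, so after installing dependencies in `dev/` every step is also available as an npm script:

```bash
cd dev
npm install
npm run setup    # same as: node scripts/setup-db.js
npm run status   # same as: node scripts/setup-db.js status
```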

dev/scripts/process-matches.js Normal file

@@ -0,0 +1,144 @@
#!/usr/bin/env node
const { MongoClient, ObjectId } = require('mongodb');
const fs = require('fs');
const { createReadStream } = require('fs');
const { createInterface } = require('readline');
const path = require('path');
/**
* Stream-based import of large JSON files
* Optimized for 9GB+ files with minimal memory usage
*/
async function importLargeJsonFile(filePath, collectionName, batchSize = 1000) {
  console.log(`  📁 File: ${filePath}`);
  console.log(`  📦 Collection: ${collectionName}`);
  console.log(`  🔄 Batch Size: ${batchSize}`);
  const startTime = Date.now();
  let processed = 0;
  let skipped = 0;
  // Connect to MongoDB
  const client = new MongoClient(process.env.MONGO_URI || 'mongodb://root:password@localhost:27017/buildpath?authSource=admin');
  await client.connect();
  const db = client.db('matches');
  const collection = db.collection(collectionName);
  try {
    // Create indexes first for better performance
    await collection.createIndex({ "metadata.matchId": 1 }, { unique: true });
    await collection.createIndex({ "info.gameDuration": 1 });
    await collection.createIndex({ "info.participants.championId": 1 });
    await collection.createIndex({ "info.participants.win": 1 });
    // Check file size
    const fileStats = fs.statSync(filePath);
    const fileSize = (fileStats.size / (1024 * 1024 * 1024)).toFixed(2);
    console.log(`  📊 File size: ${fileSize} GB`);
    await processLineDelimitedFormat(filePath, collection, batchSize, startTime);
    const totalTime = ((Date.now() - startTime) / 1000).toFixed(1);
    console.log(`🎉 Import complete in ${totalTime} seconds`);
    console.log(`✅ Processed: ${processed.toLocaleString()} matches`);
    if (skipped > 0) {
      console.log(`⚠️ Skipped: ${skipped.toLocaleString()} invalid entries`);
    }
  } catch (error) {
    console.error('❌ Import failed:', error);
    process.exit(1);
  } finally {
    await client.close();
  }
  async function processLineDelimitedFormat(filePath, collection, batchSize, startTime) {
    const fileStream = createReadStream(filePath);
    const rl = createInterface({
      input: fileStream,
      crlfDelay: Infinity
    });
    let batch = [];
    let lineCount = 0;
    for await (const line of rl) {
      lineCount++;
      process.stdout.write(`\r  Processing line ${lineCount.toLocaleString()}...  `);
      try {
        if (line.trim() === '') continue;
        const match = JSON.parse(line);
        if (!match.metadata || !match.metadata.matchId) {
          skipped++;
          continue;
        }
        // Convert $oid fields to proper ObjectId format
        if (match._id && match._id.$oid) {
          match._id = new ObjectId(match._id.$oid);
        }
        batch.push(match);
        processed++; // track matches queued for insertion
        if (batch.length >= batchSize) {
          process.stdout.write(`\r  Inserting batch into MongoDB...  `);
          await insertBatch(batch, collection);
          batch = [];
        }
      } catch (error) {
        skipped++;
      }
    }
    // Insert remaining matches
    if (batch.length > 0) {
      await insertBatch(batch, collection);
    }
  }
  async function insertBatch(batch, collection) {
    if (batch.length === 0) return;
    try {
      const result = await collection.insertMany(batch, {
        ordered: false,        // Continue on errors
        writeConcern: { w: 1 } // Acknowledge writes
      });
      return result;
    } catch (error) {
      if (error.code === 11000) {
        // Duplicate matches - skip
        return;
      }
      console.error(`❌ Batch insert error: ${error.message}`);
      throw error;
    }
  }
}
// Run the import if called directly
if (require.main === module) {
  const args = process.argv.slice(2);
  if (args.length < 2) {
    console.log('Usage: node process-matches.js <file-path> <collection-name> [batch-size]');
    console.log('Example: node process-matches.js ../data/16_1_1_matches.json 16.1.1 1000');
    process.exit(1);
  }
  const filePath = path.resolve(args[0]);
  const collectionName = args[1];
  const batchSize = args[2] ? parseInt(args[2], 10) : 1000;
  importLargeJsonFile(filePath, collectionName, batchSize)
    .then(() => process.exit(0))
    .catch((error) => {
      console.error('Import failed:', error);
      process.exit(1);
    });
}
module.exports = { importLargeJsonFile };
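// Programmatic use — a minimal sketch assuming the data layout from the README:
//   const { importLargeJsonFile } = require('./process-matches');
//   importLargeJsonFile('../data/16_1_1_matches.json', '16.1.1', 500)
//     .then(() => console.log('import finished'));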

dev/scripts/setup-db.js Normal file

@@ -0,0 +1,442 @@
#!/usr/bin/env node
const { MongoClient, ObjectId } = require('mongodb');
const { execSync } = require('child_process');
const path = require('path');
const fs = require('fs');
const https = require('https');
/**
 * Main database setup script
 * Orchestrates the complete data import and stats generation process
 */
async function setupDatabase() {
  console.log('🚀 Starting BuildPath database setup...');
  console.log('=====================================');
  // 1. Make sure patches.json is present. It must exist before we can read
  // the latest patch version from it, so download the snapshot first if needed.
  console.log('🔍 Checking for data files...');
  const dataDir = path.join(__dirname, '../data');
  // Create data directory if it doesn't exist
  if (!fs.existsSync(dataDir)) {
    fs.mkdirSync(dataDir, { recursive: true });
  }
  if (!fs.existsSync(path.join(dataDir, 'patches.json'))) {
    console.log('📥 No data files found. Downloading latest snapshot...');
    await downloadAndExtractSnapshot();
  }
  // 2. Get latest patch version
  const latestPatch = await getLatestPatchVersion();
  console.log(`🎯 Latest patch version: ${latestPatch}`);
  // 3. Verify the required data files, re-downloading the snapshot if any are missing
  const dataFiles = [
    { path: 'patches.json', required: true, description: 'Patches data' },
    { path: `${latestPatch}_matches.json`, required: true, description: 'Match data' }
  ];
  let filesExist = true;
  for (const file of dataFiles) {
    const fullPath = path.join(dataDir, file.path);
    if (file.required && !fs.existsSync(fullPath)) {
      filesExist = false;
      break;
    }
  }
  if (!filesExist) {
    console.log('📥 Data files incomplete. Downloading latest snapshot...');
    await downloadAndExtractSnapshot();
  } else {
    console.log('✅ Data files found');
  }
  for (const file of dataFiles) {
    const fullPath = path.join(dataDir, file.path);
    const stats = fs.statSync(fullPath);
    const size = (stats.size / (1024 * 1024 * 1024)).toFixed(2);
    console.log(`✅ Found ${file.description}: ${size} GB`);
  }
  // 4. Start MongoDB if not running
  console.log('🔄 Ensuring MongoDB is running...');
  try {
    execSync('docker compose up -d mongodb', {
      stdio: 'inherit',
      cwd: path.join(__dirname, '..')
    });
  } catch (error) {
    console.log('MongoDB service status:', error.message);
  }
  // 5. Wait for MongoDB to be ready
  await waitForMongoDB();
  // 6. Import patches data
  console.log('📦 Importing patches data...');
  await importPatchesData();
  // 7. Check existing matches count and import if needed
  console.log('Checking existing matches count...');
  const matchCount = await getMatchCount(latestPatch);
  console.log(`📊 Current matches in database: ${matchCount}`);
  if (matchCount < 100) {
    console.log('📥 Importing matches (this may take a while)...');
    await importMatchesData(latestPatch);
  } else {
    console.log('✅ Skipping matches import - sufficient data already present');
  }
  // 8. Run match collector to generate stats
  console.log('📊 Generating champion stats...');
  await generateChampionStats();
  console.log('🎉 Database setup complete!');
  console.log('=====================================');
  console.log('📊 Your development database is ready!');
  console.log('🔗 Connect to MongoDB: mongodb://root:password@localhost:27017');
  console.log('🌐 Access Mongo Express: http://localhost:8081');
}
async function getLatestPatchVersion() {
  try {
    const filePath = path.join(__dirname, '../data/patches.json');
    const fileContent = fs.readFileSync(filePath, 'utf8');
    // Check if it's line-delimited JSON or array format
    let patchesData;
    if (fileContent.trim().startsWith('[')) {
      // Array format
      patchesData = JSON.parse(fileContent);
      if (!Array.isArray(patchesData)) {
        throw new Error('Patches data should be an array');
      }
    } else {
      // Line-delimited JSON format
      patchesData = fileContent.split('\n')
        .filter(line => line.trim() !== '')
        .map(line => JSON.parse(line));
    }
    // Convert dates to Date objects for proper sorting
    patchesData = patchesData.map(patch => ({
      ...patch,
      date: new Date(patch.date.$date || patch.date)
    }));
    // Sort patches by date (newest first) and get the latest
    const sortedPatches = patchesData.sort((a, b) => b.date - a.date);
    const latestPatch = sortedPatches[0];
    if (!latestPatch || !latestPatch.patch) {
      throw new Error('Could not find patch version in patches data');
    }
    return latestPatch.patch;
  } catch (error) {
    console.error('❌ Failed to get latest patch version:', error);
    throw error;
  }
}
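// Note: patches.json may be a JSON array or newline-delimited JSON; each entry
// only needs a `patch` string and a `date` (plain ISO string or extended-JSON
// {"$date": ...}) for the sorting above to work.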
async function downloadAndExtractSnapshot() {
  const snapshotUrl = 'https://vhaudiquet.fr/public/buildpath-dev-snapshot.tar.xz';
  const dataDir = path.join(__dirname, '../data');
  const tempFile = path.join(dataDir, 'buildpath-dev-snapshot.tar.xz');
  const extractDir = dataDir;
  console.log('📥 Downloading snapshot...');
  // Download the file
  await new Promise((resolve, reject) => {
    const file = fs.createWriteStream(tempFile);
    https.get(snapshotUrl, (response) => {
      if (response.statusCode !== 200) {
        reject(new Error(`Failed to download snapshot: ${response.statusCode}`));
        return;
      }
      response.pipe(file);
      file.on('finish', () => {
        file.close();
        resolve();
      });
    }).on('error', (error) => {
      fs.unlink(tempFile, () => {});
      reject(error);
    });
  });
  console.log('✅ Download complete. Extracting...');
  // Extract the tar.xz file via the system tar — the `tar` npm package does
  // not decompress xz archives, so this requires tar with xz support installed
  execSync(`tar -xJf "${tempFile}" --strip-components=1 -C "${extractDir}"`, {
    stdio: 'inherit'
  });
  // Clean up the downloaded file
  fs.unlinkSync(tempFile);
  console.log('✅ Extraction complete');
}
async function waitForMongoDB() {
  let retries = 30;
  while (retries > 0) {
    // Use a fresh client per attempt so a failed connect cannot poison later ones
    const client = new MongoClient(getMongoUri());
    try {
      await client.connect();
      await client.db('admin').command({ ping: 1 });
      await client.close();
      console.log('✅ MongoDB is ready');
      return;
    } catch (error) {
      await client.close().catch(() => {});
      retries--;
      if (retries === 0) {
        console.error('❌ Failed to connect to MongoDB after multiple attempts');
        throw error;
      }
      console.log(`Waiting for MongoDB... (${retries} retries left)`);
      await new Promise(resolve => setTimeout(resolve, 2000));
    }
  }
}
async function importPatchesData() {
  const client = new MongoClient(getMongoUri());
  await client.connect();
  try {
    const filePath = path.join(__dirname, '../data/patches.json');
    const fileContent = fs.readFileSync(filePath, 'utf8');
    // Check if it's line-delimited JSON or array format
    let patchesData;
    if (fileContent.trim().startsWith('[')) {
      // Array format
      patchesData = JSON.parse(fileContent);
      if (!Array.isArray(patchesData)) {
        throw new Error('Patches data should be an array');
      }
    } else {
      // Line-delimited JSON format
      patchesData = fileContent.split('\n')
        .filter(line => line.trim() !== '')
        .map(line => JSON.parse(line));
    }
    // Convert MongoDB extended JSON ($oid, $date) in either format
    patchesData = patchesData.map(doc => convertMongoExtendedJson(doc));
    const db = client.db('patches');
    const collection = db.collection('patches');
    // Clear existing data
    await collection.deleteMany({});
    // Insert new data
    const result = await collection.insertMany(patchesData);
    console.log(`✅ Imported ${result.insertedCount} patches`);
    // Create index
    await collection.createIndex({ date: -1 });
    console.log('✅ Created patches index');
  } catch (error) {
    console.error('❌ Failed to import patches:', error);
    throw error;
  } finally {
    await client.close();
  }
}
async function importMatchesData(patchVersion) {
  const matchesFile = path.join(__dirname, '../data', `${patchVersion}_matches.json`);
  const collectionName = patchVersion;
  try {
    execSync(
      `node ${path.join(__dirname, 'process-matches.js')} ${matchesFile} ${collectionName} 1000`,
      {
        stdio: 'inherit',
        env: { ...process.env, MONGO_URI: getMongoUri() }
      }
    );
    console.log('✅ Matches import completed');
  } catch (error) {
    console.error('❌ Failed to import matches:', error);
    throw error;
  }
}
async function generateChampionStats() {
  try {
    console.log('🔄 Running match collector...');
    // Set environment variables for development mode
    const env = {
      ...process.env,
      NODE_ENV: 'development',
      USE_IMPORTED_DATA: 'true',
      MONGO_URI: getMongoUri(),
      MONGO_USER: 'root',
      MONGO_PASS: 'password',
      MONGO_HOST: 'localhost'
    };
    // Run the match collector directly with tsx (TypeScript executor) instead of docker compose
    const matchCollectorPath = path.join(__dirname, '../../match_collector/index.ts');
    execSync(`npx tsx ${matchCollectorPath}`, {
      stdio: 'inherit',
      env: env,
      cwd: path.join(__dirname, '../..')
    });
    console.log('✅ Champion stats generated');
  } catch (error) {
    console.error('❌ Failed to generate champion stats:', error);
    throw error;
  }
}
async function getMatchCount(patchVersion) {
  const client = new MongoClient(getMongoUri());
  await client.connect();
  try {
    const db = client.db('matches');
    const collection = db.collection(patchVersion);
    return await collection.countDocuments();
  } catch (error) {
    console.error('❌ Failed to get match count:', error);
    return 0;
  } finally {
    await client.close();
  }
}
function getMongoUri() {
  return process.env.MONGO_URI || 'mongodb://root:password@localhost:27017/buildpath?authSource=admin';
}
/**
 * Convert MongoDB extended JSON format to standard MongoDB objects.
 * Handles the $oid and $date wrappers used by this project's data files.
 */
function convertMongoExtendedJson(doc) {
  if (!doc || typeof doc !== 'object') {
    return doc;
  }
  // Handle ObjectId
  if (doc._id && doc._id.$oid) {
    doc._id = new ObjectId(doc._id.$oid);
  }
  // Handle Date
  if (doc.date && doc.date.$date) {
    doc.date = new Date(doc.date.$date);
  }
  // Recursively process nested objects
  for (const key in doc) {
    if (doc[key] && typeof doc[key] === 'object') {
      if (Array.isArray(doc[key])) {
        doc[key] = doc[key].map(item => convertMongoExtendedJson(item));
      } else {
        doc[key] = convertMongoExtendedJson(doc[key]);
      }
    }
  }
  return doc;
}
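// Illustration of the conversion (values are placeholders):
//   {"_id": {"$oid": "..."}, "patch": "16.1.1", "date": {"$date": "2026-01-08T00:00:00.000Z"}}
// becomes a document with a real ObjectId and a JavaScript Date.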
// Additional utility functions
async function checkDatabaseStatus() {
  const client = new MongoClient(getMongoUri());
  try {
    await client.connect();
    const adminDb = client.db('admin');
    const status = await adminDb.command({ serverStatus: 1 });
    console.log('📊 Database Status:');
    console.log(`  - Version: ${status.version}`);
    console.log(`  - Uptime: ${Math.floor(status.uptime / 60)} minutes`);
    console.log(`  - Connections: ${status.connections.current}`);
    console.log(`  - Memory Usage: ${status.mem.resident} MB`); // serverStatus reports mem.resident in MB
    // Check collections
    const dbNames = await adminDb.admin().listDatabases();
    console.log('📦 Databases:');
    dbNames.databases.forEach(db => {
      if (db.name !== 'admin' && db.name !== 'local' && db.name !== 'config') {
        console.log(`  - ${db.name}: ${(db.sizeOnDisk / 1024 / 1024).toFixed(1)} MB`);
      }
    });
  } catch (error) {
    console.error('❌ Failed to get database status:', error);
  } finally {
    await client.close();
  }
}
// Command line interface
if (require.main === module) {
  const args = process.argv.slice(2);
  const command = args[0];
  switch (command) {
    case 'status':
      checkDatabaseStatus().catch(console.error);
      break;
    case 'import-matches':
      if (args[1]) {
        importMatchesData(args[1]).catch(console.error);
      } else {
        console.error('❌ Please provide a patch version');
      }
      break;
    case 'generate-stats':
      generateChampionStats().catch(console.error);
      break;
    case 'import-patches':
      importPatchesData().catch(console.error);
      break;
    case 'match-count':
      if (args[1]) {
        getMatchCount(args[1]).then(count => console.log(`Match count: ${count}`)).catch(console.error);
      } else {
        console.error('❌ Please provide a patch version');
      }
      break;
    case 'latest-patch':
      getLatestPatchVersion().then(patch => console.log(`Latest patch: ${patch}`)).catch(console.error);
      break;
    default:
      setupDatabase().catch(error => {
        console.error('❌ Setup failed:', error);
        process.exit(1);
      });
  }
}
module.exports = {
  setupDatabase,
  importPatchesData,
  importMatchesData,
  generateChampionStats,
  checkDatabaseStatus,
  getMatchCount,
  getLatestPatchVersion
};
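// Programmatic use — a minimal sketch assuming a running local MongoDB:
//   const { checkDatabaseStatus } = require('./setup-db');
//   checkDatabaseStatus().catch(console.error);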