Better dev experience, better front page
All checks were successful
pipeline / build-and-push-images (push) Successful in 5m30s
442
dev/scripts/setup-db.js
Normal file
@@ -0,0 +1,442 @@
#!/usr/bin/env node

const { MongoClient, ObjectId } = require('mongodb');
const { execSync } = require('child_process');
const path = require('path');
const fs = require('fs');
const https = require('https');
const tar = require('tar');

/**
 * Main database setup script.
 * Orchestrates the complete data import and stats generation process.
 */
async function setupDatabase() {
  console.log('🚀 Starting BuildPath database setup...');
  console.log('=====================================');

  // 1. Get the latest patch version
  const latestPatch = await getLatestPatchVersion();
  console.log(`🎯 Latest patch version: ${latestPatch}`);

  // 2. Check that the data directory exists and contains the required files
  console.log('🔍 Checking for data files...');
  const dataDir = path.join(__dirname, '../data');
  const dataFiles = [
    { path: 'patches.json', required: true, description: 'Patches data' },
    { path: `${latestPatch}_matches.json`, required: true, description: 'Match data' }
  ];

  // Create the data directory if it doesn't exist
  if (!fs.existsSync(dataDir)) {
    fs.mkdirSync(dataDir, { recursive: true });
  }

  let filesExist = true;
  for (const file of dataFiles) {
    const fullPath = path.join(dataDir, file.path);
    if (file.required && !fs.existsSync(fullPath)) {
      filesExist = false;
      break;
    }
  }

  if (!filesExist) {
    console.log('📥 No data files found. Downloading latest snapshot...');
    await downloadAndExtractSnapshot();
  } else {
    console.log('✅ Data files found');
    for (const file of dataFiles) {
      const fullPath = path.join(dataDir, file.path);
      const stats = fs.statSync(fullPath);
      const size = (stats.size / (1024 * 1024 * 1024)).toFixed(2);
      console.log(`✅ Found ${file.description}: ${size} GB`);
    }
  }

  // 3. Start MongoDB if it is not already running
  console.log('🔄 Ensuring MongoDB is running...');
  try {
    execSync('docker compose up -d mongodb', {
      stdio: 'inherit',
      cwd: path.join(__dirname, '..')
    });
  } catch (error) {
    console.log('MongoDB service status:', error.message);
  }

  // 4. Wait for MongoDB to be ready
  await waitForMongoDB();

  // 5. Import patches data
  console.log('📦 Importing patches data...');
  await importPatchesData();

  // 6. Check the existing match count and import if needed
  console.log('Checking existing match count...');
  const matchCount = await getMatchCount(latestPatch);
  console.log(`📊 Current matches in database: ${matchCount}`);

  if (matchCount < 100) {
    console.log('📥 Importing matches (this may take a while)...');
    await importMatchesData(latestPatch);
  } else {
    console.log('✅ Skipping matches import - sufficient data already present');
  }

  // 7. Run the match collector to generate stats
  console.log('📊 Generating champion stats...');
  await generateChampionStats();

  console.log('🎉 Database setup complete!');
  console.log('=====================================');
  console.log('📊 Your development database is ready!');
  console.log('🔗 Connect to MongoDB: mongodb://root:password@localhost:27017');
  console.log('🌐 Access Mongo Express: http://localhost:8081');
}

async function getLatestPatchVersion() {
  try {
    const filePath = path.join(__dirname, '../data/patches.json');
    const fileContent = fs.readFileSync(filePath, 'utf8');

    // Check whether it's line-delimited JSON or array format
    let patchesData;
    if (fileContent.trim().startsWith('[')) {
      // Array format
      patchesData = JSON.parse(fileContent);
      if (!Array.isArray(patchesData)) {
        throw new Error('Patches data should be an array');
      }
    } else {
      // Line-delimited JSON format
      patchesData = fileContent.split('\n')
        .filter(line => line.trim() !== '')
        .map(line => JSON.parse(line));
    }

    // Convert dates to Date objects for proper sorting
    patchesData = patchesData.map(patch => ({
      ...patch,
      date: new Date(patch.date.$date || patch.date)
    }));

    // Sort patches by date (newest first) and take the latest
    const sortedPatches = patchesData.sort((a, b) => b.date - a.date);
    const latestPatch = sortedPatches[0];

    if (!latestPatch || !latestPatch.patch) {
      throw new Error('Could not find patch version in patches data');
    }

    return latestPatch.patch;
  } catch (error) {
    console.error('❌ Failed to get latest patch version:', error);
    throw error;
  }
}

async function downloadAndExtractSnapshot() {
  const snapshotUrl = 'https://vhaudiquet.fr/public/buildpath-dev-snapshot.tar.xz';
  const dataDir = path.join(__dirname, '../data');
  const tempFile = path.join(dataDir, 'buildpath-dev-snapshot.tar.xz');
  const extractDir = dataDir;

  console.log('📥 Downloading snapshot...');

  // Download the file
  await new Promise((resolve, reject) => {
    const file = fs.createWriteStream(tempFile);
    https.get(snapshotUrl, (response) => {
      if (response.statusCode !== 200) {
        reject(new Error(`Failed to download snapshot: ${response.statusCode}`));
        return;
      }

      response.pipe(file);
      file.on('finish', () => {
        file.close();
        resolve();
      });
    }).on('error', (error) => {
      fs.unlink(tempFile, () => {});
      reject(error);
    });
  });

  console.log('✅ Download complete. Extracting...');

  // Extract the tar.xz archive
  await tar.x({
    file: tempFile,
    cwd: extractDir,
    strip: 1
  });

  // Clean up the downloaded archive
  fs.unlinkSync(tempFile);

  console.log('✅ Extraction complete');
}

async function waitForMongoDB() {
  const client = new MongoClient(getMongoUri());
  let retries = 30;

  while (retries > 0) {
    try {
      await client.connect();
      await client.db('admin').command({ ping: 1 });
      await client.close();
      console.log('✅ MongoDB is ready');
      return;
    } catch (error) {
      retries--;
      if (retries === 0) {
        console.error('❌ Failed to connect to MongoDB after multiple attempts');
        throw error;
      }
      console.log(`Waiting for MongoDB... (${retries} retries left)`);
      await new Promise(resolve => setTimeout(resolve, 2000));
    }
  }
}

async function importPatchesData() {
  const client = new MongoClient(getMongoUri());
  await client.connect();

  try {
    const filePath = path.join(__dirname, '../data/patches.json');
    const fileContent = fs.readFileSync(filePath, 'utf8');

    // Check whether it's line-delimited JSON or array format
    let patchesData;
    if (fileContent.trim().startsWith('[')) {
      // Array format
      patchesData = JSON.parse(fileContent);
      if (!Array.isArray(patchesData)) {
        throw new Error('Patches data should be an array');
      }
    } else {
      // Line-delimited JSON format
      patchesData = fileContent.split('\n')
        .filter(line => line.trim() !== '')
        .map(line => {
          const doc = JSON.parse(line);
          return convertMongoExtendedJson(doc);
        });
    }

    // Convert any extended JSON in array format too
    if (Array.isArray(patchesData)) {
      patchesData = patchesData.map(doc => convertMongoExtendedJson(doc));
    }

    const db = client.db('patches');
    const collection = db.collection('patches');

    // Clear existing data
    await collection.deleteMany({});

    // Insert the new data
    const result = await collection.insertMany(patchesData);
    console.log(`✅ Imported ${result.insertedCount} patches`);

    // Create an index on the patch date
    await collection.createIndex({ date: -1 });
    console.log('✅ Created patches index');

  } catch (error) {
    console.error('❌ Failed to import patches:', error);
    throw error;
  } finally {
    await client.close();
  }
}

async function importMatchesData(patchVersion) {
  const matchesFile = path.join(__dirname, '../data', `${patchVersion}_matches.json`);
  const collectionName = patchVersion;

  try {
    // Delegate the heavy lifting to process-matches.js with a batch size of 1000
    execSync(
      `node ${path.join(__dirname, 'process-matches.js')} ${matchesFile} ${collectionName} 1000`,
      {
        stdio: 'inherit',
        env: { ...process.env, MONGO_URI: getMongoUri() }
      }
    );
    console.log('✅ Matches import completed');
  } catch (error) {
    console.error('❌ Failed to import matches:', error);
    throw error;
  }
}

async function generateChampionStats() {
  try {
    console.log('🔄 Running match collector...');

    // Set environment variables for development mode
    const env = {
      ...process.env,
      NODE_ENV: 'development',
      USE_IMPORTED_DATA: 'true',
      MONGO_URI: getMongoUri(),
      MONGO_USER: 'root',
      MONGO_PASS: 'password',
      MONGO_HOST: 'localhost'
    };

    // Run the match collector directly with tsx (TypeScript executor) instead of docker compose
    const matchCollectorPath = path.join(__dirname, '../../match_collector/index.ts');
    execSync(`npx tsx ${matchCollectorPath}`, {
      stdio: 'inherit',
      env: env,
      cwd: path.join(__dirname, '../..')
    });

    console.log('✅ Champion stats generated');
  } catch (error) {
    console.error('❌ Failed to generate champion stats:', error);
    throw error;
  }
}

async function getMatchCount(patchVersion) {
  const client = new MongoClient(getMongoUri());
  await client.connect();

  try {
    const db = client.db('matches');
    const collection = db.collection(patchVersion);
    const count = await collection.countDocuments();
    return count;
  } catch (error) {
    console.error('❌ Failed to get match count:', error);
    return 0;
  } finally {
    await client.close();
  }
}

function getMongoUri() {
  return process.env.MONGO_URI || 'mongodb://root:password@localhost:27017/buildpath?authSource=admin';
}
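
// Usage note (illustrative): the connection string can be overridden via MONGO_URI, e.g. when
// MongoDB is not running on localhost. The hostname below is a placeholder, not a project default:
//   MONGO_URI="mongodb://root:password@<db-host>:27017/buildpath?authSource=admin" node dev/scripts/setup-db.js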

/**
 * Convert MongoDB extended JSON format to standard MongoDB objects.
 * Handles the $oid and $date extended JSON wrappers, recursing into nested values.
 */
function convertMongoExtendedJson(doc) {
  if (!doc || typeof doc !== 'object') {
    return doc;
  }

  // Handle ObjectId
  if (doc._id && doc._id.$oid) {
    doc._id = new ObjectId(doc._id.$oid);
  }

  // Handle Date
  if (doc.date && doc.date.$date) {
    doc.date = new Date(doc.date.$date);
  }

  // Recursively process nested objects
  for (const key in doc) {
    if (doc[key] && typeof doc[key] === 'object') {
      if (Array.isArray(doc[key])) {
        doc[key] = doc[key].map(item => convertMongoExtendedJson(item));
      } else {
        doc[key] = convertMongoExtendedJson(doc[key]);
      }
    }
  }

  return doc;
}
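
// Illustration (assumed document shape, for readability only): an extended JSON document such as
//   { _id: { $oid: '...' }, patch: '<version>', date: { $date: '2024-01-01T00:00:00Z' } }
// is converted in place to
//   { _id: ObjectId('...'), patch: '<version>', date: new Date('2024-01-01T00:00:00Z') }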

// Additional utility functions
async function checkDatabaseStatus() {
  const client = new MongoClient(getMongoUri());
  try {
    await client.connect();
    const adminDb = client.db('admin');
    const status = await adminDb.command({ serverStatus: 1 });

    console.log('📊 Database Status:');
    console.log(` - Version: ${status.version}`);
    console.log(` - Uptime: ${Math.floor(status.uptime / 60)} minutes`);
    console.log(` - Connections: ${status.connections.current}`);
    console.log(` - Memory Usage: ${status.mem.resident} MB`); // serverStatus reports mem.resident in MB already

    // List non-system databases and their on-disk sizes
    const dbNames = await adminDb.admin().listDatabases();
    console.log('📦 Databases:');
    dbNames.databases.forEach(db => {
      if (db.name !== 'admin' && db.name !== 'local' && db.name !== 'config') {
        console.log(` - ${db.name}: ${(db.sizeOnDisk / 1024 / 1024).toFixed(1)} MB`);
      }
    });

  } catch (error) {
    console.error('❌ Failed to get database status:', error);
  } finally {
    await client.close();
  }
}

// Command line interface
if (require.main === module) {
  const args = process.argv.slice(2);
  const command = args[0];

  switch (command) {
    case 'status':
      checkDatabaseStatus().catch(console.error);
      break;
    case 'import-matches':
      if (args[1]) {
        importMatchesData(args[1]).catch(console.error);
      } else {
        console.error('❌ Please provide a patch version');
      }
      break;
    case 'generate-stats':
      generateChampionStats().catch(console.error);
      break;
    case 'import-patches':
      importPatchesData().catch(console.error);
      break;
    case 'match-count':
      if (args[1]) {
        getMatchCount(args[1]).then(count => console.log(`Match count: ${count}`)).catch(console.error);
      } else {
        console.error('❌ Please provide a patch version');
      }
      break;
    case 'latest-patch':
      getLatestPatchVersion().then(patch => console.log(`Latest patch: ${patch}`)).catch(console.error);
      break;
    default:
      setupDatabase().catch(error => {
        console.error('❌ Setup failed:', error);
        process.exit(1);
      });
  }
}
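
// Example invocations (illustrative; paths assume the command is run from the repository root):
//   node dev/scripts/setup-db.js                          # full setup: download, import, generate stats
//   node dev/scripts/setup-db.js status                   # print server and database status
//   node dev/scripts/setup-db.js import-patches           # re-import the patches data
//   node dev/scripts/setup-db.js import-matches <patch>   # import matches for a given patch version
//   node dev/scripts/setup-db.js match-count <patch>      # count imported matches for a patch
//   node dev/scripts/setup-db.js generate-stats           # run the match collector only
//   node dev/scripts/setup-db.js latest-patch             # print the latest patch version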

module.exports = {
  setupDatabase,
  importPatchesData,
  importMatchesData,
  generateChampionStats,
  checkDatabaseStatus,
  getMatchCount,
  getLatestPatchVersion
};