diff --git a/README.md b/README.md
index 96ef0b3..2f7f1ec 100644
--- a/README.md
+++ b/README.md
@@ -16,6 +16,22 @@ BuildPath is made of four components:
 ### Getting started
 
 Developing BuildPath requires Docker for a local MongoDB instance, and a NodeJS/npm installation.
+Then, for the first-time setup:
+```bash
+# Install npm and node
+sudo apt install npm nodejs # Ubuntu
+sudo pacman -S npm nodejs   # Arch
+sudo dnf install nodejs     # Fedora
+
+# Install Docker by following the instructions on
+# https://docs.docker.com/engine/install/
+# ... and don't forget Docker Compose
+
+cd dev && npm i && cd ..             # Install dependencies for the dev environment
+cd frontend && npm i && cd ..        # Install dependencies for frontend
+cd match_collector && npm i && cd .. # Install dependencies for match_collector
+cd patch_detector && npm i && cd ..  # Install dependencies for patch_detector
+```
 
 BuildPath needs data to work, either for generating statistics in the `match_collector` or for the frontend.
diff --git a/dev/package.json b/dev/package.json
index 811fbd8..d8b9c41 100644
--- a/dev/package.json
+++ b/dev/package.json
@@ -3,6 +3,7 @@
   "version": "1.0.0",
   "description": "Development tools for BuildPath database setup",
   "main": "scripts/setup-db.js",
+  "type": "module",
   "scripts": {
     "setup": "node scripts/setup-db.js",
     "import-matches": "node scripts/setup-db.js import-matches",
diff --git a/dev/scripts/process-matches.js b/dev/scripts/process-matches.js
index d255e2f..7d3b6c0 100644
--- a/dev/scripts/process-matches.js
+++ b/dev/scripts/process-matches.js
@@ -1,10 +1,14 @@
 #!/usr/bin/env node
-const { MongoClient, ObjectId } = require('mongodb');
-const fs = require('fs');
-const { createReadStream } = require('fs');
-const { createInterface } = require('readline');
-const path = require('path');
+import { MongoClient, ObjectId } from 'mongodb';
+import fs from 'fs';
+import { createReadStream } from 'fs';
+import { createInterface } from 'readline';
+import path from 'path';
+import { fileURLToPath } from 'url';
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
 
 /**
  * Stream-based import of large JSON files
  */
@@ -121,7 +125,7 @@ async function importLargeJsonFile(filePath, collectionName, batchSize = 1000) {
 }
 
 // Run the import if called directly
-if (require.main === module) {
+if (import.meta.url === `file://${process.argv[1]}`) {
   const args = process.argv.slice(2);
   if (args.length < 2) {
     console.log('Usage: node process-matches.js [batch-size]');
@@ -141,4 +145,4 @@ if (require.main === module) {
   });
 }
 
-module.exports = { importLargeJsonFile };
+export { importLargeJsonFile };
diff --git a/dev/scripts/setup-db.js b/dev/scripts/setup-db.js
index e2c5514..b46eaf1 100644
--- a/dev/scripts/setup-db.js
+++ b/dev/scripts/setup-db.js
@@ -1,11 +1,14 @@
 #!/usr/bin/env node
-const { MongoClient } = require('mongodb');
-const { execSync } = require('child_process');
-const path = require('path');
-const fs = require('fs');
-const https = require('https');
-const tar = require('tar');
+import { MongoClient, ObjectId } from 'mongodb';
+import { execSync } from 'child_process';
+import path from 'path';
+import fs from 'fs';
+import https from 'https';
+import { fileURLToPath } from 'url';
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
 
 /**
  * Main database setup script
 */
@@ -15,23 +18,26 @@ async function setupDatabase() {
   console.log('🚀 Starting BuildPath database setup...');
   console.log('=====================================');
 
-  // 1. Get latest patch version
+  // Download the latest data snapshot if none is present yet
+  const dataDir = path.join(__dirname, '../data');
+  const patchFile = path.join(dataDir, 'patches.json');
+  if (!fs.existsSync(dataDir) || !fs.existsSync(patchFile)) {
+    fs.mkdirSync(dataDir, { recursive: true });
+    console.log('📥 No data files found. Downloading latest snapshot...');
+    await downloadAndExtractSnapshot();
+  }
+
+  // Get latest patch version
   const latestPatch = await getLatestPatchVersion();
   console.log(`🎯 Latest patch version: ${latestPatch}`);
 
-  // 2. Check if data directory exists and has files
+  // Check if data directory exists and has files
   console.log('🔍 Checking for data files...');
-  const dataDir = path.join(__dirname, '../data');
   const dataFiles = [
     { path: 'patches.json', required: true, description: 'Patches data' },
     { path: `${latestPatch}_matches.json`, required: true, description: 'Match data' }
   ];
 
-  // Create data directory if it doesn't exist
-  if (!fs.existsSync(dataDir)) {
-    fs.mkdirSync(dataDir, { recursive: true });
-  }
-
   let filesExist = true;
   for (const file of dataFiles) {
     const fullPath = path.join(dataDir, file.path);
@@ -88,6 +94,9 @@ async function setupDatabase() {
   console.log('📊 Generating champion stats...');
   await generateChampionStats();
 
+  // Create .env file in frontend with default MongoDB connection
+  createFrontendEnvFile();
+
   console.log('🎉 Database setup complete!');
   console.log('=====================================');
   console.log('📊 Your development database is ready!');
@@ -98,6 +107,10 @@
 async function getLatestPatchVersion() {
   try {
     const filePath = path.join(__dirname, '../data/patches.json');
+    if (!fs.existsSync(filePath)) {
+      return null;
+    }
+
     const fileContent = fs.readFileSync(filePath, 'utf8');
 
     // Check if it's line-delimited JSON or array format
@@ -142,7 +155,7 @@
   const tempFile = path.join(dataDir, 'buildpath-dev-snapshot.tar.xz');
   const extractDir = dataDir;
 
-  console.log('📥 Downloading snapshot...');
+  console.log(`📥 Downloading snapshot to ${tempFile}...`);
 
   // Download the file
   await new Promise((resolve, reject) => {
@@ -166,11 +179,9 @@
   console.log('✅ Download complete. Extracting...');
 
-  // Extract the tar.xz file
-  await tar.x({
-    file: tempFile,
-    cwd: extractDir,
-    strip: 1
+  // Extract the tar.xz file using the system tar command (keep the old strip: 1 behavior)
+  execSync(`tar -xJf "${tempFile}" -C "${extractDir}" --strip-components=1`, {
+    stdio: 'inherit'
   });
 
   // Clean up the downloaded file
@@ -233,15 +244,22 @@ async function importPatchesData() {
     patchesData = patchesData.map(doc => convertMongoExtendedJson(doc));
   }
 
+  // Sort patches by date (newest first); date may be a Date or extended-JSON { $date }
+  patchesData.sort((a, b) => {
+    const dateA = new Date(a.date?.$date ?? a.date ?? 0);
+    const dateB = new Date(b.date?.$date ?? b.date ?? 0);
+    return dateB - dateA; // Descending order (newest first)
+  });
+
   const db = client.db('patches');
   const collection = db.collection('patches');
 
   // Clear existing data
   await collection.deleteMany({});
 
-  // Insert new data
+  // Insert sorted data
   const result = await collection.insertMany(patchesData);
-  console.log(`✅ Imported ${result.insertedCount} patches`);
+  console.log(`✅ Imported ${result.insertedCount} patches (sorted by date)`);
 
   // Create index
   await collection.createIndex({ date: -1 });
@@ -325,6 +343,35 @@ function getMongoUri() {
   return process.env.MONGO_URI || 'mongodb://root:password@localhost:27017/buildpath?authSource=admin';
 }
 
+/**
+ * Create .env file in frontend directory with default MongoDB connection
+ */
+function createFrontendEnvFile() {
+  try {
+    const frontendDir = path.join(__dirname, '../../frontend');
+    const envFile = path.join(frontendDir, '.env');
+
+    // Check if .env file already exists
+    if (fs.existsSync(envFile)) {
+      console.log('✅ .env file already exists in frontend directory');
+      return;
+    }
+
+    // Create .env content with default MongoDB connection
+    const envContent = `MONGO_USER=root
+MONGO_PASS=password
+MONGO_HOST=localhost
+`;
+
+    // Write the .env file
+    fs.writeFileSync(envFile, envContent, 'utf8');
+    console.log('✅ Created .env file in frontend directory with default MongoDB connection');
+
+  } catch (error) {
+    console.error('❌ Failed to create .env file:', error);
+  }
+}
+
 /**
  * Convert MongoDB extended JSON format to standard MongoDB objects
  * Handles $oid, $date, and other extended JSON operators
@@ -349,7 +396,7 @@
     if (doc[key] && typeof doc[key] === 'object') {
       if (Array.isArray(doc[key])) {
         doc[key] = doc[key].map(item => convertMongoExtendedJson(item));
-      } else {
+      } else if (key !== 'parent') {
         doc[key] = convertMongoExtendedJson(doc[key]);
       }
     }
@@ -358,9 +405,6 @@
   return doc;
 }
 
-// MongoDB ObjectId for extended JSON conversion
-const { ObjectId } = require('mongodb');
-
 // Additional utility functions
 async function checkDatabaseStatus() {
   const client = new MongoClient(getMongoUri());
@@ -392,7 +436,7 @@
 }
 
 // Command line interface
-if (require.main === module) {
+if (import.meta.url === `file://${process.argv[1]}`) {
   const args = process.argv.slice(2);
   const command = args[0];
 
@@ -431,7 +475,7 @@
   }
 }
 
-module.exports = {
+export {
   setupDatabase,
   importPatchesData,
   importMatchesData,
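
Note for trying this branch locally: both `getMongoUri()` and the generated frontend `.env` assume a MongoDB instance on `localhost:27017` with `root`/`password` credentials. The repo's Docker Compose setup is the intended way to provide one; if it isn't at hand, a minimal stand-in could be started like this (a sketch: the container name and image tag are assumptions, only the credentials and port come from the diff above):

```bash
# Hypothetical one-off substitute for the repo's Docker Compose setup;
# credentials and port mirror the defaults in getMongoUri().
docker run -d --name buildpath-mongo \
  -p 27017:27017 \
  -e MONGO_INITDB_ROOT_USERNAME=root \
  -e MONGO_INITDB_ROOT_PASSWORD=password \
  mongo:7
```

With that running, `cd dev && npm run setup` exercises the whole path in this patch: snapshot download, `tar` extraction, patch and match import, and `.env` creation.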