dev: update dev script for auto-download
README.md (+16)
@@ -16,6 +16,22 @@ BuildPath is made of four components:
 ### Getting started
 
 Developing BuildPath requires Docker for a local MongoDB instance, and a NodeJS/npm installation.
 
+Then, for the first-time setup:
+
+```bash
+# Install npm and node
+sudo apt install npm nodejs   # Ubuntu
+sudo pacman -S npm nodejs     # Arch
+sudo dnf install nodejs       # Fedora
+
+# Install docker. Follow the instructions at
+# https://docs.docker.com/engine/install/
+# ... and don't forget docker compose
+
+cd dev && npm i && cd ..              # Install dependencies for the dev environment
+cd frontend && npm i && cd ..         # Install dependencies for frontend
+cd match_collector && npm i && cd ..  # Install dependencies for match_collector
+cd patch_detector && npm i && cd ..   # Install dependencies for patch_detector
+```
+
 BuildPath needs data to work, either for generating statistics in the `match_collector` or for the frontend.

dev/package.json
@@ -3,6 +3,7 @@
   "version": "1.0.0",
   "description": "Development tools for BuildPath database setup",
   "main": "scripts/setup-db.js",
+  "type": "module",
   "scripts": {
     "setup": "node scripts/setup-db.js",
     "import-matches": "node scripts/setup-db.js import-matches",
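
Adding `"type": "module"` switches the whole dev package to ES modules, which is why every `require()` in the scripts below becomes an `import`. One caveat worth flagging (editor's aside, not part of the commit): `require()` disappears entirely in ESM, so a script that still needs to load JSON can use `createRequire` from `node:module` as the standard escape hatch — a minimal sketch:

```js
// Sketch only — not in this commit. createRequire restores a local
// require() inside an ES module, e.g. for reading JSON files.
import { createRequire } from 'node:module';

const require = createRequire(import.meta.url);
const pkg = require('./package.json'); // hypothetical relative path
console.log(pkg.version);
```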

dev/scripts/process-matches.js
@@ -1,10 +1,14 @@
 #!/usr/bin/env node
 
-const { MongoClient, ObjectId } = require('mongodb');
-const fs = require('fs');
-const { createReadStream } = require('fs');
-const { createInterface } = require('readline');
-const path = require('path');
+import { MongoClient, ObjectId } from 'mongodb';
+import fs from 'fs';
+import { createReadStream } from 'fs';
+import { createInterface } from 'readline';
+import path from 'path';
+import { fileURLToPath } from 'url';
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
 
 /**
  * Stream-based import of large JSON files
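
For readers skimming the diff: the `createReadStream`/`createInterface` pair above is what drives the streaming import. A minimal sketch of the pattern (editor's reconstruction, not the project's exact code — it assumes line-delimited JSON, one document per line, and the `importLargeJsonFile(filePath, collectionName, batchSize)` signature shown in the hunk below):

```js
import { createReadStream } from 'fs';
import { createInterface } from 'readline';
import { MongoClient } from 'mongodb';

async function importLargeJsonFile(filePath, collectionName, batchSize = 1000) {
  const client = new MongoClient(process.env.MONGO_URI ?? 'mongodb://localhost:27017');
  await client.connect();
  const collection = client.db('buildpath').collection(collectionName);

  // Read line by line so multi-GB files never sit in memory at once
  const rl = createInterface({ input: createReadStream(filePath), crlfDelay: Infinity });
  let batch = [];
  for await (const line of rl) {
    if (!line.trim()) continue;           // skip blank lines
    batch.push(JSON.parse(line));         // one JSON document per line
    if (batch.length >= batchSize) {
      await collection.insertMany(batch); // flush a full batch
      batch = [];
    }
  }
  if (batch.length) await collection.insertMany(batch); // flush the tail
  await client.close();
}
```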
@@ -121,7 +125,7 @@ async function importLargeJsonFile(filePath, collectionName, batchSize = 1000) {
 }
 
 // Run the import if called directly
-if (require.main === module) {
+if (import.meta.url === `file://${process.argv[1]}`) {
   const args = process.argv.slice(2);
   if (args.length < 2) {
     console.log('Usage: node process-matches.js <file-path> <collection-name> [batch-size]');
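
A note on the "run if called directly" check (editor's aside): hand-building the `file://` string works on typical Linux paths but breaks on Windows drive letters, spaces, and symlinked paths. Node's own converter sidesteps all of that:

```js
// Sturdier ESM equivalent of require.main === module (sketch, not in this commit)
import { pathToFileURL } from 'node:url';

if (import.meta.url === pathToFileURL(process.argv[1]).href) {
  // ...run the CLI
}
```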
@@ -141,4 +145,4 @@ if (require.main === module) {
   });
 }
 
-module.exports = { importLargeJsonFile };
+export { importLargeJsonFile };

dev/scripts/setup-db.js
@@ -1,11 +1,14 @@
 #!/usr/bin/env node
 
-const { MongoClient } = require('mongodb');
-const { execSync } = require('child_process');
-const path = require('path');
-const fs = require('fs');
-const https = require('https');
-const tar = require('tar');
+import { MongoClient, ObjectId } from 'mongodb';
+import { execSync } from 'child_process';
+import path from 'path';
+import fs from 'fs';
+import https from 'https';
+import { fileURLToPath } from 'url';
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
 
 /**
  * Main database setup script
@@ -15,23 +18,26 @@ async function setupDatabase() {
   console.log('🚀 Starting BuildPath database setup...');
   console.log('=====================================');
 
-  // 1. Get latest patch version
+  // Download a snapshot if the data directory is empty or missing
+  const dataDir = path.join(__dirname, '../data');
+  const patchFile = path.join(dataDir, 'patches.json');
+  if (!fs.existsSync(dataDir) || !fs.existsSync(patchFile)) {
+    fs.mkdirSync(dataDir, { recursive: true });
+    console.log('📥 No data files found. Downloading latest snapshot...');
+    await downloadAndExtractSnapshot();
+  }
+
+  // Get latest patch version
   const latestPatch = await getLatestPatchVersion();
   console.log(`🎯 Latest patch version: ${latestPatch}`);
 
-  // 2. Check if data directory exists and has files
+  // Check for the expected data files
   console.log('🔍 Checking for data files...');
-  const dataDir = path.join(__dirname, '../data');
   const dataFiles = [
     { path: 'patches.json', required: true, description: 'Patches data' },
     { path: `${latestPatch}_matches.json`, required: true, description: 'Match data' }
   ];
 
-  // Create data directory if it doesn't exist
-  if (!fs.existsSync(dataDir)) {
-    fs.mkdirSync(dataDir, { recursive: true });
-  }
-
   let filesExist = true;
   for (const file of dataFiles) {
     const fullPath = path.join(dataDir, file.path);
@@ -88,6 +94,9 @@ async function setupDatabase() {
   console.log('📊 Generating champion stats...');
   await generateChampionStats();
 
+  // Create .env file in frontend with default MongoDB connection
+  createFrontendEnvFile();
+
   console.log('🎉 Database setup complete!');
   console.log('=====================================');
   console.log('📊 Your development database is ready!');
@@ -98,6 +107,10 @@ async function setupDatabase() {
 async function getLatestPatchVersion() {
   try {
     const filePath = path.join(__dirname, '../data/patches.json');
+    if (!fs.existsSync(filePath)) {
+      return null;
+    }
+
     const fileContent = fs.readFileSync(filePath, 'utf8');
 
     // Check if it's line-delimited JSON or array format
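
The format check named in that last comment sits outside the hunk; a sketch of what it plausibly does (editor's assumption — patches.json is either a JSON array or line-delimited JSON, and the `version` field name is hypothetical):

```js
// Editor's sketch (assumed): detect array vs line-delimited JSON and
// pick the newest patch.
function parseLatestPatch(fileContent) {
  const trimmed = fileContent.trim();
  const patches = trimmed.startsWith('[')
    ? JSON.parse(trimmed)                                          // array format
    : trimmed.split('\n').filter(Boolean).map(l => JSON.parse(l)); // one doc per line

  // Newest first, tolerating plain dates and extended-JSON {$date: ...}
  patches.sort((a, b) =>
    new Date(b.date?.$date ?? b.date ?? 0) - new Date(a.date?.$date ?? a.date ?? 0));
  return patches[0]?.version; // hypothetical field name
}
```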
@@ -142,7 +155,7 @@ async function downloadAndExtractSnapshot() {
   const tempFile = path.join(dataDir, 'buildpath-dev-snapshot.tar.xz');
   const extractDir = dataDir;
 
-  console.log('📥 Downloading snapshot...');
+  console.log(`📥 Downloading snapshot to ${tempFile}...`);
 
   // Download the file
   await new Promise((resolve, reject) => {
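
The body of that Promise is outside the hunk. The usual shape of a promisified https download to disk looks like this (editor's sketch; `SNAPSHOT_URL` stands in for whatever URL the script actually uses):

```js
await new Promise((resolve, reject) => {
  const file = fs.createWriteStream(tempFile);
  https.get(SNAPSHOT_URL, (res) => {
    if (res.statusCode !== 200) {
      reject(new Error(`Download failed: HTTP ${res.statusCode}`));
      return;
    }
    res.pipe(file);                               // stream straight to disk
    file.on('finish', () => file.close(resolve)); // flush before resolving
  }).on('error', (err) => {
    fs.unlink(tempFile, () => reject(err));       // drop the partial file
  });
});
```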
@@ -166,11 +179,9 @@
 
   console.log('✅ Download complete. Extracting...');
 
-  // Extract the tar.xz file
-  await tar.x({
-    file: tempFile,
-    cwd: extractDir,
-    strip: 1
+  // Extract the tar.xz file using the system tar command
+  execSync(`tar -xJf ${tempFile} -C ${extractDir}`, {
+    stdio: 'inherit'
   });
 
   // Clean up the downloaded file
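
Two editorial caveats on the new extraction call, neither addressed by the commit: interpolating paths into a shell string breaks if either path ever contains spaces or shell metacharacters, and plain `tar -xJf` does not replicate the `strip: 1` behaviour of the removed `tar.x()` call (dropping the archive's top-level directory). A sketch that handles both, assuming the snapshot nests its files one directory deep:

```js
import { execFileSync } from 'child_process';

// No shell involved, so paths are passed verbatim; --strip-components=1
// mirrors the old { strip: 1 } option.
execFileSync('tar', ['-xJf', tempFile, '-C', extractDir, '--strip-components=1'],
             { stdio: 'inherit' });
```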
@@ -233,15 +244,22 @@ async function importPatchesData() {
     patchesData = patchesData.map(doc => convertMongoExtendedJson(doc));
   }
 
+  // Sort patches by date (newest first)
+  patchesData.sort((a, b) => {
+    const dateA = new Date(a.date?.$date ?? a.date ?? 0);
+    const dateB = new Date(b.date?.$date ?? b.date ?? 0);
+    return dateB - dateA; // Descending order (newest first)
+  });
+
   const db = client.db('patches');
   const collection = db.collection('patches');
 
   // Clear existing data
   await collection.deleteMany({});
 
-  // Insert new data
+  // Insert sorted data
   const result = await collection.insertMany(patchesData);
-  console.log(`✅ Imported ${result.insertedCount} patches`);
+  console.log(`✅ Imported ${result.insertedCount} patches (sorted by date)`);
 
   // Create index
   await collection.createIndex({ date: -1 });
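
One more editorial note on the sort: MongoDB makes no guarantee that `find()` returns documents in insertion order, so pre-sorting before `insertMany` is best-effort at most. Consumers that need newest-first should sort in the query — which the `{ date: -1 }` index created above makes cheap:

```js
// Explicit sort at query time; the { date: -1 } index keeps this fast.
const newestFirst = await collection.find({}).sort({ date: -1 }).toArray();
```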
@@ -325,6 +343,35 @@ function getMongoUri() {
   return process.env.MONGO_URI || 'mongodb://root:password@localhost:27017/buildpath?authSource=admin';
 }
 
+/**
+ * Create .env file in frontend directory with default MongoDB connection
+ */
+function createFrontendEnvFile() {
+  try {
+    const frontendDir = path.join(__dirname, '../../frontend');
+    const envFile = path.join(frontendDir, '.env');
+
+    // Check if .env file already exists
+    if (fs.existsSync(envFile)) {
+      console.log('✅ .env file already exists in frontend directory');
+      return;
+    }
+
+    // Create .env content with default MongoDB connection
+    const envContent = `MONGO_USER=root
+MONGO_PASS=password
+MONGO_HOST=localhost
+`;
+
+    // Write the .env file
+    fs.writeFileSync(envFile, envContent, 'utf8');
+    console.log('✅ Created .env file in frontend directory with default MongoDB connection');
+
+  } catch (error) {
+    console.error('❌ Failed to create .env file:', error);
+  }
+}
+
 /**
  * Convert MongoDB extended JSON format to standard MongoDB objects
  * Handles $oid, $date, and other extended JSON operators
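
How the frontend consumes that .env is not part of this commit; presumably something dotenv-shaped along these lines (editor's sketch — the variable names match the file written above, everything else is assumed):

```js
import 'dotenv/config'; // assumes the frontend uses dotenv

const { MONGO_USER, MONGO_PASS, MONGO_HOST } = process.env;
const uri = `mongodb://${MONGO_USER}:${MONGO_PASS}@${MONGO_HOST}:27017/buildpath?authSource=admin`;
```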
@@ -349,7 +396,7 @@ function convertMongoExtendedJson(doc) {
     if (doc[key] && typeof doc[key] === 'object') {
       if (Array.isArray(doc[key])) {
         doc[key] = doc[key].map(item => convertMongoExtendedJson(item));
-      } else {
+      } else if (key !== 'parent') {
         doc[key] = convertMongoExtendedJson(doc[key]);
       }
     }
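
The leaf conversions named in the docstring (`$oid`, `$date`) are outside the hunk; they presumably look roughly like this (editor's sketch, using the ObjectId import from the top of the file):

```js
function convertMongoExtendedJson(doc) {
  if (doc === null || typeof doc !== 'object') return doc;
  if (doc.$oid) return new ObjectId(doc.$oid); // {"$oid": "..."}  -> ObjectId
  if (doc.$date) return new Date(doc.$date);   // {"$date": "..."} -> Date
  // ...then the recursion over keys shown in the hunk above
  return doc;
}
```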
@@ -358,9 +405,6 @@ function convertMongoExtendedJson(doc) {
   return doc;
 }
 
-// MongoDB ObjectId for extended JSON conversion
-const { ObjectId } = require('mongodb');
-
 // Additional utility functions
 async function checkDatabaseStatus() {
   const client = new MongoClient(getMongoUri());
@@ -392,7 +436,7 @@ async function checkDatabaseStatus() {
 }
 
 // Command line interface
-if (require.main === module) {
+if (import.meta.url === `file://${process.argv[1]}`) {
   const args = process.argv.slice(2);
   const command = args[0];
 
@@ -431,7 +475,7 @@ if (require.main === module) {
   }
 }
 
-module.exports = {
+export {
   setupDatabase,
   importPatchesData,
   importMatchesData,