#!/usr/bin/env node
/**
 * Microsoft Rewards Bot - Automatic Update System
 *
 * Uses GitHub API to download latest code as ZIP archive.
 * No Git required, no merge conflicts, always clean.
 *
 * Features:
 * - Downloads latest code from GitHub (ZIP)
 * - Preserves user files (accounts, config, sessions)
 * - Selective file copying
 * - Automatic dependency installation
 * - TypeScript rebuild
 *
 * Usage:
 *   node scripts/installer/update.mjs   # Run update
 *   npm run start                       # Bot runs this automatically if enabled
 */

import { spawn } from 'node:child_process'
import {
  cpSync,
  createWriteStream,
  existsSync,
  mkdirSync,
  readdirSync,
  readFileSync,
  rmSync,
  statSync,
  writeFileSync
} from 'node:fs'
import { get as httpsGet } from 'node:https'
import { dirname, join } from 'node:path'

// =============================================================================
// UTILITY FUNCTIONS
// =============================================================================

/**
 * Strip JSON comments
 */
function stripJsonComments(input) {
  let result = ''
  let inString = false
  let stringChar = ''
  let inLineComment = false
  let inBlockComment = false

  for (let i = 0; i < input.length; i++) {
    const char = input[i]
    const next = input[i + 1]

    if (inLineComment) {
      if (char === '\n' || char === '\r') {
        inLineComment = false
        result += char
      }
      continue
    }

    if (inBlockComment) {
      if (char === '*' && next === '/') {
        inBlockComment = false
        i++
      }
      continue
    }

    if (inString) {
      result += char
      if (char === '\\') {
        i++
        if (i < input.length) result += input[i]
        continue
      }
      if (char === stringChar) inString = false
      continue
    }

    if (char === '"' || char === '\'') {
      inString = true
      stringChar = char
      result += char
      continue
    }

    if (char === '/' && next === '/') {
      inLineComment = true
      i++
      continue
    }

    if (char === '/' && next === '*') {
      inBlockComment = true
      i++
      continue
    }

    result += char
  }

  return result
}

/**
 * Read and parse JSON config file
 */
function readJsonConfig(preferredPaths) {
  for (const candidate of preferredPaths) {
    if (!existsSync(candidate)) continue
    try {
      const raw = readFileSync(candidate, 'utf8').replace(/^\uFEFF/, '')
      return JSON.parse(stripJsonComments(raw))
    } catch {
      // Try next candidate
    }
  }
  return null
}
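/*
 * For reference, the `update` block this script reads from src/config.jsonc
 * (via readJsonConfig above). The keys match what the code below accesses;
 * the values shown are illustrative defaults, not the authoritative schema:
 *
 * {
 *   "update": {
 *     "enabled": true,              // false = skip updates entirely
 *     "dockerMode": "auto",         // "auto" | "force-docker" | "force-host"
 *     "autoUpdateConfig": false,    // true = allow updates to overwrite src/config.jsonc
 *     "autoUpdateAccounts": false   // true = allow updates to overwrite accounts files
 *   }
 * }
 */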
/**
 * Run shell command
 */
function run(cmd, args, opts = {}) {
  return new Promise((resolve) => {
    const child = spawn(cmd, args, {
      stdio: 'inherit',
      shell: process.platform === 'win32',
      ...opts
    })
    child.on('close', (code) => resolve(code ?? 0))
    child.on('error', () => resolve(1))
  })
}

/**
 * Check if command exists
 */
async function which(cmd) {
  const probe = process.platform === 'win32' ? 'where' : 'which'
  const code = await run(probe, [cmd], { stdio: 'ignore' })
  return code === 0
}

/**
 * Download file via HTTPS
 */
function downloadFile(url, dest) {
  return new Promise((resolve, reject) => {
    const file = createWriteStream(dest)
    httpsGet(url, (response) => {
      // Handle redirects
      if (response.statusCode === 302 || response.statusCode === 301) {
        file.close()
        rmSync(dest, { force: true })
        downloadFile(response.headers.location, dest).then(resolve).catch(reject)
        return
      }
      if (response.statusCode !== 200) {
        file.close()
        rmSync(dest, { force: true })
        reject(new Error(`HTTP ${response.statusCode}: ${response.statusMessage}`))
        return
      }
      response.pipe(file)
      file.on('finish', () => {
        file.close()
        resolve()
      })
    }).on('error', (err) => {
      file.close()
      rmSync(dest, { force: true })
      reject(err)
    })
  })
}

/**
 * Extract ZIP file (cross-platform)
 */
async function extractZip(zipPath, destDir) {
  // Try unzip (Unix-like)
  if (await which('unzip')) {
    const code = await run('unzip', ['-q', '-o', zipPath, '-d', destDir], { stdio: 'ignore' })
    if (code === 0) return
  }

  // Try tar (modern Windows/Unix)
  if (await which('tar')) {
    const code = await run('tar', ['-xf', zipPath, '-C', destDir], { stdio: 'ignore' })
    if (code === 0) return
  }

  // Try PowerShell Expand-Archive (Windows)
  if (process.platform === 'win32') {
    const code = await run('powershell', [
      '-Command',
      `Expand-Archive -Path "${zipPath}" -DestinationPath "${destDir}" -Force`
    ], { stdio: 'ignore' })
    if (code === 0) return
  }

  throw new Error('No extraction tool found (unzip, tar, or PowerShell required)')
}

// =============================================================================
// ENVIRONMENT DETECTION
// =============================================================================

/**
 * Detect if running inside a Docker container
 * Checks multiple indicators for accuracy
 */
function isDocker() {
  try {
    // Method 1: Check for /.dockerenv file (most reliable)
    if (existsSync('/.dockerenv')) {
      return true
    }

    // Method 2: Check /proc/1/cgroup for docker
    if (existsSync('/proc/1/cgroup')) {
      const cgroupContent = readFileSync('/proc/1/cgroup', 'utf8')
      if (cgroupContent.includes('docker') || cgroupContent.includes('/kubepods/')) {
        return true
      }
    }

    // Method 3: Check environment variables
    if (process.env.DOCKER === 'true' || process.env.CONTAINER === 'docker' || process.env.KUBERNETES_SERVICE_HOST) {
      return true
    }

    // Method 4: Check /proc/self/mountinfo for overlay filesystem
    if (existsSync('/proc/self/mountinfo')) {
      const mountinfo = readFileSync('/proc/self/mountinfo', 'utf8')
      if (mountinfo.includes('docker') || mountinfo.includes('overlay')) {
        return true
      }
    }

    return false
  } catch {
    // If any error occurs (e.g., on Windows), assume not Docker
    return false
  }
}

/**
 * Determine update mode based on config and environment
 */
function getUpdateMode(configData) {
  const dockerMode = configData?.update?.dockerMode || 'auto'

  if (dockerMode === 'force-docker') {
    return 'docker'
  }
  if (dockerMode === 'force-host') {
    return 'host'
  }

  // Auto-detect
  return isDocker() ? 'docker' : 'host'
}
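/*
 * Resolution summary for getUpdateMode(), derived from the logic above:
 *   dockerMode = "force-docker"  -> "docker"
 *   dockerMode = "force-host"    -> "host"
 *   dockerMode = "auto" / unset  -> isDocker() ? "docker" : "host"
 */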
// =============================================================================
// MAIN UPDATE LOGIC
// =============================================================================

/**
 * Check if update is available by comparing versions
 * Uses GitHub API directly (no CDN cache issues, always fresh data)
 * Rate limit: 60 requests/hour (sufficient for bot updates)
 */
async function checkVersion() {
  try {
    // Read local version
    const localPkgPath = join(process.cwd(), 'package.json')
    if (!existsSync(localPkgPath)) {
      console.log('āš ļø Could not find local package.json')
      return { updateAvailable: false, localVersion: 'unknown', remoteVersion: 'unknown' }
    }

    const localPkg = JSON.parse(readFileSync(localPkgPath, 'utf8'))
    const localVersion = localPkg.version

    // Fetch remote version from GitHub API (no cache)
    const repoOwner = 'LightZirconite'
    const repoName = 'Microsoft-Rewards-Bot'
    const branch = 'main'

    console.log('šŸ” Checking for updates...')
    console.log(`   Local:  ${localVersion}`)

    // Use GitHub API directly - no CDN cache, always fresh
    const apiUrl = `https://api.github.com/repos/${repoOwner}/${repoName}/contents/package.json?ref=${branch}`

    return new Promise((resolve) => {
      const options = {
        headers: {
          'User-Agent': 'Microsoft-Rewards-Bot-Updater',
          'Accept': 'application/vnd.github.v3.raw', // Returns raw file content
          'Cache-Control': 'no-cache'
        }
      }

      const request = httpsGet(apiUrl, options, (res) => {
        if (res.statusCode !== 200) {
          console.log(`   āš ļø GitHub API returned HTTP ${res.statusCode}`)
          if (res.statusCode === 403) {
            console.log('   ā„¹ļø Rate limit may be exceeded (60/hour). Try again later.')
          }
          resolve({ updateAvailable: false, localVersion, remoteVersion: 'unknown' })
          return
        }

        let data = ''
        res.on('data', chunk => data += chunk)
        res.on('end', () => {
          try {
            const remotePkg = JSON.parse(data)
            const remoteVersion = remotePkg.version
            console.log(`   Remote: ${remoteVersion}`)

            // Any difference triggers update (upgrade or downgrade)
            const updateAvailable = localVersion !== remoteVersion
            resolve({ updateAvailable, localVersion, remoteVersion })
          } catch (err) {
            console.log(`   āš ļø Could not parse remote package.json: ${err.message}`)
            resolve({ updateAvailable: false, localVersion, remoteVersion: 'unknown' })
          }
        })
      })

      request.on('error', (err) => {
        console.log(`   āš ļø Network error: ${err.message}`)
        resolve({ updateAvailable: false, localVersion, remoteVersion: 'unknown' })
      })

      request.setTimeout(10000, () => {
        request.destroy()
        console.log('   āš ļø Request timeout (10s)')
        resolve({ updateAvailable: false, localVersion, remoteVersion: 'unknown' })
      })
    })
  } catch (err) {
    console.log(`āš ļø Version check failed: ${err.message}`)
    return { updateAvailable: false, localVersion: 'unknown', remoteVersion: 'unknown' }
  }
}
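/*
 * For reference, the request checkVersion() issues, built from the constants above
 * (owner LightZirconite, repo Microsoft-Rewards-Bot, branch main):
 *
 *   GET https://api.github.com/repos/LightZirconite/Microsoft-Rewards-Bot/contents/package.json?ref=main
 *   Accept: application/vnd.github.v3.raw   -> response body is the raw package.json
 *
 * Unauthenticated GitHub API calls are limited to about 60 requests/hour per IP,
 * which is why an HTTP 403 is reported as a likely rate-limit hit.
 */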
/**
 * Perform update using GitHub API (ZIP download)
 */
async function performUpdate() {
  // Step 0: Check if update is needed by comparing versions
  const versionCheck = await checkVersion()
  if (!versionCheck.updateAvailable) {
    console.log(`āœ… Already up to date (v${versionCheck.localVersion})`)
    return 0 // Exit without creating update marker
  }

  // Step 0.5: Detect environment and determine update mode
  const configData = readJsonConfig([
    'src/config.jsonc',
    'config.jsonc',
    'src/config.json',
    'config.json'
  ])
  const updateMode = getUpdateMode(configData)
  const envIcon = updateMode === 'docker' ? '🐳' : 'šŸ’»'

  console.log(`\nšŸ“¦ Update available: ${versionCheck.localVersion} → ${versionCheck.remoteVersion}`)
  console.log(`${envIcon} Environment: ${updateMode === 'docker' ? 'Docker container' : 'Host system'}`)
  console.log('ā³ Updating... (this may take a moment)\n')

  // Step 1: Read user preferences (silent)
  const userConfig = {
    autoUpdateConfig: configData?.update?.autoUpdateConfig ?? false,
    autoUpdateAccounts: configData?.update?.autoUpdateAccounts ?? false
  }

  // Step 2: Create backups (protected files + critical for rollback)
  const backupDir = join(process.cwd(), '.update-backup')
  const rollbackDir = join(process.cwd(), '.update-rollback')

  // Clean previous backups
  rmSync(backupDir, { recursive: true, force: true })
  rmSync(rollbackDir, { recursive: true, force: true })
  mkdirSync(backupDir, { recursive: true })
  mkdirSync(rollbackDir, { recursive: true })

  const filesToProtect = [
    { path: 'src/config.jsonc', protect: !userConfig.autoUpdateConfig },
    { path: 'src/accounts.jsonc', protect: !userConfig.autoUpdateAccounts },
    { path: 'src/accounts.json', protect: !userConfig.autoUpdateAccounts },
    { path: 'sessions', protect: true, isDir: true },
    { path: '.playwright-chromium-installed', protect: true }
  ]

  const backedUp = []
  for (const file of filesToProtect) {
    if (!file.protect) continue
    const srcPath = join(process.cwd(), file.path)
    if (!existsSync(srcPath)) continue

    const destPath = join(backupDir, file.path)
    mkdirSync(dirname(destPath), { recursive: true })

    try {
      if (file.isDir) {
        cpSync(srcPath, destPath, { recursive: true })
      } else {
        writeFileSync(destPath, readFileSync(srcPath))
      }
      backedUp.push(file)
    } catch {
      // Silent failure - continue with update
    }
  }

  // Backup critical files for potential rollback
  // FIXED: Don't backup dist/ - it must be rebuilt from new source code
  const criticalFiles = ['package.json', 'package-lock.json']
  for (const file of criticalFiles) {
    const srcPath = join(process.cwd(), file)
    if (!existsSync(srcPath)) continue
    const destPath = join(rollbackDir, file)
    try {
      if (statSync(srcPath).isDirectory()) {
        cpSync(srcPath, destPath, { recursive: true })
      } else {
        cpSync(srcPath, destPath)
      }
    } catch {
      // Continue
    }
  }

  // CRITICAL FIX: Delete old dist/ before update to force clean rebuild
  const oldDistPath = join(process.cwd(), 'dist')
  if (existsSync(oldDistPath)) {
    try {
      rmSync(oldDistPath, { recursive: true, force: true })
    } catch {
      // Continue - build will overwrite anyway
    }
  }

  // Step 3: Download latest code from GitHub
  process.stdout.write('šŸ“„ Downloading...')
  const repoOwner = 'LightZirconite'
  const repoName = 'Microsoft-Rewards-Bot'
  const branch = 'main'
  const archiveUrl = `https://github.com/${repoOwner}/${repoName}/archive/refs/heads/${branch}.zip`
  const archivePath = join(process.cwd(), '.update-download.zip')
  const extractDir = join(process.cwd(), '.update-extract')

  try {
    await downloadFile(archiveUrl, archivePath)
    process.stdout.write(' āœ“\n')
  } catch (err) {
    console.log(` āŒ\nāŒ Download failed: ${err.message}`)
    return 1
  }

  // Step 4: Extract archive
  process.stdout.write('šŸ“‚ Extracting...')
  rmSync(extractDir, { recursive: true, force: true })
  mkdirSync(extractDir, { recursive: true })

  try {
    await extractZip(archivePath, extractDir)
    process.stdout.write(' āœ“\n')
  } catch (err) {
    console.log(` āŒ\nāŒ Extraction failed: ${err.message}`)
    return 1
  }

  // Step 5: Find extracted folder
  const extractedItems = readdirSync(extractDir)
  const extractedRepoDir = extractedItems.find(item => item.startsWith(repoName))
  if (!extractedRepoDir) {
    console.log('\nāŒ Could not find extracted repository folder')
    return 1
  }
  const sourceDir = join(extractDir, extractedRepoDir)
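  /*
   * Note: a GitHub branch archive unpacks into a single "<repo>-<branch>" folder
   * (typically "Microsoft-Rewards-Bot-main" for the URL built in Step 3), which is
   * why Step 5 matches on the repoName prefix rather than an exact directory name.
   */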
  // Step 6: Copy files selectively
  process.stdout.write('šŸ“¦ Updating files...')

  const itemsToUpdate = [
    'src',
    'docs',
    'setup',
    'public',
    'tests',
    'package.json',
    'package-lock.json',
    'tsconfig.json',
    'Dockerfile',
    'compose.yaml',
    'entrypoint.sh',
    'run.sh',
    'README.md',
    'LICENSE'
  ]

  for (const item of itemsToUpdate) {
    const srcPath = join(sourceDir, item)
    const destPath = join(process.cwd(), item)

    if (!existsSync(srcPath)) continue

    // Skip protected items
    const isProtected = backedUp.some(f => f.path === item || destPath.includes(f.path))
    if (isProtected) continue

    try {
      if (existsSync(destPath)) {
        rmSync(destPath, { recursive: true, force: true })
      }
      if (statSync(srcPath).isDirectory()) {
        cpSync(srcPath, destPath, { recursive: true })
      } else {
        cpSync(srcPath, destPath)
      }
    } catch {
      // Silent failure - continue
    }
  }
  process.stdout.write(' āœ“\n')

  // Step 7: Restore protected files (silent)
  if (backedUp.length > 0) {
    for (const file of backedUp) {
      const backupPath = join(backupDir, file.path)
      if (!existsSync(backupPath)) continue

      const destPath = join(process.cwd(), file.path)
      mkdirSync(dirname(destPath), { recursive: true })

      try {
        if (file.isDir) {
          rmSync(destPath, { recursive: true, force: true })
          cpSync(backupPath, destPath, { recursive: true })
        } else {
          writeFileSync(destPath, readFileSync(backupPath))
        }
      } catch {
        // Silent failure
      }
    }
  }

  // Step 8: Cleanup temporary files (silent)
  rmSync(archivePath, { force: true })
  rmSync(extractDir, { recursive: true, force: true })
  rmSync(backupDir, { recursive: true, force: true })

  // Step 9: Create update marker for bot restart detection
  const updateMarkerPath = join(process.cwd(), '.update-happened')
  writeFileSync(updateMarkerPath, JSON.stringify({
    timestamp: new Date().toISOString(),
    fromVersion: versionCheck.localVersion,
    toVersion: versionCheck.remoteVersion,
    method: 'github-api'
  }, null, 2))
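  /*
   * Example .update-happened contents (field names come from the object above;
   * the values here are illustrative only):
   *
   * {
   *   "timestamp": "2025-01-01T12:00:00.000Z",
   *   "fromVersion": "2.0.0",
   *   "toVersion": "2.0.1",
   *   "method": "github-api"
   * }
   */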
  // Step 10: Install dependencies & rebuild
  const hasNpm = await which('npm')
  if (!hasNpm) {
    console.log('āš ļø npm not found - please run: npm install && npm run build')
    return 0
  }

  process.stdout.write('šŸ“¦ Installing dependencies...')
  const installCode = await run('npm', ['ci', '--silent'], { stdio: 'ignore' })
  if (installCode !== 0) {
    await run('npm', ['install', '--silent'], { stdio: 'ignore' })
  }
  process.stdout.write(' āœ“\n')

  // FIXED: Show build output to detect TypeScript errors and verify compilation
  process.stdout.write('šŸ”Ø Building project...\n')
  const buildCode = await run('npm', ['run', 'build'], { stdio: 'inherit' })
  if (buildCode !== 0) {
    // Build failed - rollback
    process.stdout.write(' āŒ\n')
    console.log('āš ļø Build failed, rolling back to previous version...')

    // Restore from rollback
    for (const file of criticalFiles) {
      const srcPath = join(rollbackDir, file)
      const destPath = join(process.cwd(), file)
      if (!existsSync(srcPath)) continue
      try {
        rmSync(destPath, { recursive: true, force: true })
        if (statSync(srcPath).isDirectory()) {
          cpSync(srcPath, destPath, { recursive: true })
        } else {
          cpSync(srcPath, destPath)
        }
      } catch {
        // Continue
      }
    }

    console.log('āœ… Rollback complete - using previous version')
    rmSync(rollbackDir, { recursive: true, force: true })
    return 1
  }
  process.stdout.write(' āœ“\n')

  // Step 11: Verify integrity (check if critical files exist AND were recently updated)
  process.stdout.write('šŸ” Verifying integrity...')
  const criticalPaths = [
    'dist/index.js',
    'package.json',
    'src/index.ts'
  ]

  let integrityOk = true
  const buildTime = Date.now()

  for (const path of criticalPaths) {
    const fullPath = join(process.cwd(), path)
    if (!existsSync(fullPath)) {
      console.log(`\n   āŒ Missing: ${path}`)
      integrityOk = false
      break
    }

    // IMPROVED: For compiled files, verify they were recently updated (within last 2 minutes)
    if (path.startsWith('dist/')) {
      try {
        const stats = statSync(fullPath)
        const fileAge = buildTime - stats.mtimeMs
        if (fileAge > 120000) { // 2 minutes
          console.log(`\n   āš ļø ${path} not recently updated (${Math.round(fileAge / 1000)}s old)`)
          integrityOk = false
          break
        }
      } catch {
        integrityOk = false
        break
      }
    }
  }

  if (!integrityOk) {
    process.stdout.write(' āŒ\n')
    console.log('āš ļø Integrity check failed, rolling back...')

    // Restore from rollback
    for (const file of criticalFiles) {
      const srcPath = join(rollbackDir, file)
      const destPath = join(process.cwd(), file)
      if (!existsSync(srcPath)) continue
      try {
        rmSync(destPath, { recursive: true, force: true })
        if (statSync(srcPath).isDirectory()) {
          cpSync(srcPath, destPath, { recursive: true })
        } else {
          cpSync(srcPath, destPath)
        }
      } catch {
        // Continue
      }
    }

    console.log('āœ… Rollback complete - using previous version')
    rmSync(rollbackDir, { recursive: true, force: true })
    return 1
  }
  process.stdout.write(' āœ“\n')

  // Clean rollback backup on success
  rmSync(rollbackDir, { recursive: true, force: true })

  console.log(`\nāœ… Updated successfully! (${versionCheck.localVersion} → ${versionCheck.remoteVersion})`)

  // Different behavior for Docker vs Host
  if (updateMode === 'docker') {
    console.log('🐳 Docker mode: Update complete')
    console.log('   Container will restart automatically if configured\n')
    // In Docker, don't restart - let orchestrator handle it
    // Just exit cleanly so Docker can restart the container
    return 0
  } else {
    console.log('šŸ”„ Restarting in same process...\n')
    // In host mode, signal restart needed
    return 0
  }
}

// =============================================================================
// ENTRY POINT
// =============================================================================

/**
 * Cleanup temporary files
 */
function cleanup() {
  const tempDirs = [
    '.update-backup',
    '.update-rollback',
    '.update-extract',
    '.update-download.zip'
  ]

  for (const dir of tempDirs) {
    const path = join(process.cwd(), dir)
    try {
      if (existsSync(path)) {
        if (statSync(path).isDirectory()) {
          rmSync(path, { recursive: true, force: true })
        } else {
          rmSync(path, { force: true })
        }
      }
    } catch {
      // Ignore cleanup errors
    }
  }
}

async function main() {
  // Check if updates are enabled in config
  const configData = readJsonConfig([
    'src/config.jsonc',
    'config.jsonc',
    'src/config.json',
    'config.json'
  ])

  if (configData?.update?.enabled === false) {
    console.log('\nāš ļø Updates are disabled in config.jsonc')
    console.log('To enable: set "update.enabled" to true in src/config.jsonc\n')
    return 0
  }

  // Global timeout: 5 minutes max
  const timeout = setTimeout(() => {
    console.error('\nā±ļø Update timeout (5 min) - cleaning up...')
    cleanup()
    process.exit(1)
  }, 5 * 60 * 1000)

  try {
    const code = await performUpdate()
    clearTimeout(timeout)

    // Final cleanup of temporary files
    cleanup()
    process.exit(code)
  } catch (err) {
    clearTimeout(timeout)
    cleanup()
    throw err
  }
}

main().catch((err) => {
  console.error('\nāŒ Update failed with error:', err)
  console.error('\nCleaning up and reverting...')
  cleanup()
  process.exit(1)
})
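/*
 * Exit codes, as implemented above (useful when the bot or a wrapper script
 * invokes this updater and inspects the result):
 *   0 - updates disabled, already up to date, or the update flow finished
 *       (possibly with warnings, e.g. npm not found)
 *   1 - download, extraction, build, or integrity failure (rollback attempted),
 *       an unexpected error, or the 5-minute global timeout
 */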