mirror of https://github.com/LightZirconite/Microsoft-Rewards-Bot.git (synced 2026-01-10 09:16:16 +00:00)
New structure 2
742  scripts/installer/update.mjs  (Normal file)
@@ -0,0 +1,742 @@
#!/usr/bin/env node
/**
 * Microsoft Rewards Bot - Automatic Update System
 *
 * Downloads the latest code from GitHub as a ZIP archive.
 * No Git required, no merge conflicts, always a clean copy.
 *
 * Features:
 * - Downloads latest code from GitHub (ZIP)
 * - Preserves user files (accounts, config, sessions)
 * - Selective file copying
 * - Automatic dependency installation
 * - TypeScript rebuild
 *
 * Usage:
 *   node scripts/installer/update.mjs   # Run update
 *   npm run start                       # Bot runs this automatically if enabled
 */

import { spawn } from 'node:child_process'
import { cpSync, createWriteStream, existsSync, mkdirSync, readdirSync, readFileSync, rmSync, statSync, writeFileSync } from 'node:fs'
import { get as httpsGet } from 'node:https'
import { dirname, join } from 'node:path'

// =============================================================================
// UTILITY FUNCTIONS
// =============================================================================

/**
 * Strip JSON comments
 */
function stripJsonComments(input) {
  let result = ''
  let inString = false
  let stringChar = ''
  let inLineComment = false
  let inBlockComment = false

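  // Single pass over the input: string and comment state is tracked so that
  // comment markers inside string literals are preserved while real // and
  // /* */ comments are dropped.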
  for (let i = 0; i < input.length; i++) {
    const char = input[i]
    const next = input[i + 1]

    if (inLineComment) {
      if (char === '\n' || char === '\r') {
        inLineComment = false
        result += char
      }
      continue
    }

    if (inBlockComment) {
      if (char === '*' && next === '/') {
        inBlockComment = false
        i++
      }
      continue
    }

    if (inString) {
      result += char
      if (char === '\\') {
        i++
        if (i < input.length) result += input[i]
        continue
      }
      if (char === stringChar) inString = false
      continue
    }

    if (char === '"' || char === '\'') {
      inString = true
      stringChar = char
      result += char
      continue
    }

    if (char === '/' && next === '/') {
      inLineComment = true
      i++
      continue
    }

    if (char === '/' && next === '*') {
      inBlockComment = true
      i++
      continue
    }

    result += char
  }

  return result
}

/**
 * Read and parse JSON config file
 */
function readJsonConfig(preferredPaths) {
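  // Candidate paths are resolved relative to the current working directory;
  // the first file that exists and parses wins, otherwise null is returned.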
  for (const candidate of preferredPaths) {
    if (!existsSync(candidate)) continue
    try {
      const raw = readFileSync(candidate, 'utf8').replace(/^\uFEFF/, '')
      return JSON.parse(stripJsonComments(raw))
    } catch {
      // Try next candidate
    }
  }
  return null
}

/**
 * Run shell command
 */
function run(cmd, args, opts = {}) {
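  // Resolves with the child's exit code (or 1 if the process cannot be spawned)
  // and never rejects, so callers only need to check the returned code.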
  return new Promise((resolve) => {
    const child = spawn(cmd, args, {
      stdio: 'inherit',
      shell: process.platform === 'win32',
      ...opts
    })
    child.on('close', (code) => resolve(code ?? 0))
    child.on('error', () => resolve(1))
  })
}

/**
 * Check if command exists
 */
async function which(cmd) {
  const probe = process.platform === 'win32' ? 'where' : 'which'
  const code = await run(probe, [cmd], { stdio: 'ignore' })
  return code === 0
}

/**
 * Download file via HTTPS
 */
function downloadFile(url, dest) {
  return new Promise((resolve, reject) => {
    const file = createWriteStream(dest)

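    // GitHub archive and raw URLs commonly answer with a 301/302 redirect,
    // so redirects are followed (recursively) before data is written to dest.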
    httpsGet(url, (response) => {
      // Handle redirects
      if (response.statusCode === 302 || response.statusCode === 301) {
        file.close()
        rmSync(dest, { force: true })
        downloadFile(response.headers.location, dest).then(resolve).catch(reject)
        return
      }

      if (response.statusCode !== 200) {
        file.close()
        rmSync(dest, { force: true })
        reject(new Error(`HTTP ${response.statusCode}: ${response.statusMessage}`))
        return
      }

      response.pipe(file)
      file.on('finish', () => {
        file.close()
        resolve()
      })
    }).on('error', (err) => {
      file.close()
      rmSync(dest, { force: true })
      reject(err)
    })
  })
}

/**
 * Extract ZIP file (cross-platform)
 */
async function extractZip(zipPath, destDir) {
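  // Fallback chain: unzip → tar → PowerShell Expand-Archive.
  // The first tool that is available and exits successfully wins.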
  // Try unzip (Unix-like)
  if (await which('unzip')) {
    const code = await run('unzip', ['-q', '-o', zipPath, '-d', destDir], { stdio: 'ignore' })
    if (code === 0) return
  }

  // Try tar (modern Windows/Unix)
  if (await which('tar')) {
    const code = await run('tar', ['-xf', zipPath, '-C', destDir], { stdio: 'ignore' })
    if (code === 0) return
  }

  // Try PowerShell Expand-Archive (Windows)
  if (process.platform === 'win32') {
    const code = await run('powershell', [
      '-Command',
      `Expand-Archive -Path "${zipPath}" -DestinationPath "${destDir}" -Force`
    ], { stdio: 'ignore' })
    if (code === 0) return
  }

  throw new Error('No extraction tool found (unzip, tar, or PowerShell required)')
}

// =============================================================================
// ENVIRONMENT DETECTION
// =============================================================================

/**
 * Detect if running inside a Docker container
 * Checks multiple indicators for accuracy
 */
function isDocker() {
  try {
    // Method 1: Check for /.dockerenv file (most reliable)
    if (existsSync('/.dockerenv')) {
      return true
    }

    // Method 2: Check /proc/1/cgroup for docker
    if (existsSync('/proc/1/cgroup')) {
      const cgroupContent = readFileSync('/proc/1/cgroup', 'utf8')
      if (cgroupContent.includes('docker') || cgroupContent.includes('/kubepods/')) {
        return true
      }
    }

    // Method 3: Check environment variables
    if (process.env.DOCKER === 'true' ||
        process.env.CONTAINER === 'docker' ||
        process.env.KUBERNETES_SERVICE_HOST) {
      return true
    }

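    // Note: overlay filesystems are also used outside containers, so this last
    // heuristic can over-detect; it only runs if the stronger checks above failed.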
    // Method 4: Check /proc/self/mountinfo for overlay filesystem
    if (existsSync('/proc/self/mountinfo')) {
      const mountinfo = readFileSync('/proc/self/mountinfo', 'utf8')
      if (mountinfo.includes('docker') || mountinfo.includes('overlay')) {
        return true
      }
    }

    return false
  } catch {
    // If any error occurs (e.g., on Windows), assume not Docker
    return false
  }
}

/**
 * Determine update mode based on config and environment
 */
function getUpdateMode(configData) {
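  // update.dockerMode accepts 'auto' (default), 'force-docker', or 'force-host';
  // anything else falls through to auto-detection.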
  const dockerMode = configData?.update?.dockerMode || 'auto'

  if (dockerMode === 'force-docker') {
    return 'docker'
  }

  if (dockerMode === 'force-host') {
    return 'host'
  }

  // Auto-detect
  return isDocker() ? 'docker' : 'host'
}

// =============================================================================
// MAIN UPDATE LOGIC
// =============================================================================

/**
 * Check whether an update is available by comparing versions
 * Resolves with { updateAvailable, localVersion, remoteVersion };
 * updateAvailable is true whenever the versions differ (allows both upgrades and downgrades)
 */
async function checkVersion() {
  try {
    // Read local version
    const localPkgPath = join(process.cwd(), 'package.json')
    if (!existsSync(localPkgPath)) {
      console.log('⚠️ Could not find local package.json')
      return { updateAvailable: false, localVersion: 'unknown', remoteVersion: 'unknown' }
    }

    const localPkg = JSON.parse(readFileSync(localPkgPath, 'utf8'))
    const localVersion = localPkg.version

    // Fetch remote version from GitHub
    const repoOwner = 'Obsidian-wtf'
    const repoName = 'Microsoft-Rewards-Bot'
    const branch = 'main'

    // Add cache-buster to prevent GitHub from serving stale cached version
    const cacheBuster = Date.now()
    const pkgUrl = `https://raw.githubusercontent.com/${repoOwner}/${repoName}/refs/heads/${branch}/package.json?cb=${cacheBuster}`

    console.log('🔍 Checking for updates...')
    console.log(` Local: ${localVersion}`)

    return new Promise((resolve) => {
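      // This promise always resolves; any network, HTTP, or parse failure reports
      // updateAvailable: false so the caller skips the update instead of crashing.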
      // Request with cache-busting headers
      const options = {
        headers: {
          'Cache-Control': 'no-cache, no-store, must-revalidate',
          'Pragma': 'no-cache',
          'Expires': '0',
          'User-Agent': 'Microsoft-Rewards-Bot-Updater'
        }
      }

      const request = httpsGet(pkgUrl, options, (res) => {
        if (res.statusCode !== 200) {
          console.log(` ⚠️ Could not check remote version (HTTP ${res.statusCode})`)
          resolve({ updateAvailable: false, localVersion, remoteVersion: 'unknown' })
          return
        }

        let data = ''
        res.on('data', chunk => data += chunk)
        res.on('end', () => {
          try {
            const remotePkg = JSON.parse(data)
            const remoteVersion = remotePkg.version
            console.log(` Remote: ${remoteVersion}`)

            // Any difference triggers update (upgrade or downgrade)
            const updateAvailable = localVersion !== remoteVersion
            resolve({ updateAvailable, localVersion, remoteVersion })
          } catch (err) {
            console.log(` ⚠️ Could not parse remote package.json: ${err.message}`)
            resolve({ updateAvailable: false, localVersion, remoteVersion: 'unknown' })
          }
        })
      })

      request.on('error', (err) => {
        console.log(` ⚠️ Network error: ${err.message}`)
        resolve({ updateAvailable: false, localVersion, remoteVersion: 'unknown' })
      })

      // Timeout after 10 seconds
      request.setTimeout(10000, () => {
        request.destroy()
        console.log(' ⚠️ Request timeout')
        resolve({ updateAvailable: false, localVersion, remoteVersion: 'unknown' })
      })
    })
  } catch (err) {
    console.log(`⚠️ Version check failed: ${err.message}`)
    return { updateAvailable: false, localVersion: 'unknown', remoteVersion: 'unknown' }
  }
}

/**
 * Perform update using GitHub API (ZIP download)
 */
async function performUpdate() {
  // Step 0: Check if update is needed by comparing versions
  const versionCheck = await checkVersion()

  if (!versionCheck.updateAvailable) {
    console.log(`✅ Already up to date (v${versionCheck.localVersion})`)
    return 0 // Exit without creating update marker
  }

  // Step 0.5: Detect environment and determine update mode
  const configData = readJsonConfig([
    'src/config.jsonc',
    'config.jsonc',
    'src/config.json',
    'config.json'
  ])

  const updateMode = getUpdateMode(configData)
  const envIcon = updateMode === 'docker' ? '🐳' : '💻'

  console.log(`\n📦 Update available: ${versionCheck.localVersion} → ${versionCheck.remoteVersion}`)
  console.log(`${envIcon} Environment: ${updateMode === 'docker' ? 'Docker container' : 'Host system'}`)
  console.log('⏳ Updating... (this may take a moment)\n')

  // Step 1: Read user preferences (silent)
  const userConfig = {
    autoUpdateConfig: configData?.update?.autoUpdateConfig ?? false,
    autoUpdateAccounts: configData?.update?.autoUpdateAccounts ?? false
  }

  // Step 2: Create backups (protected files + critical for rollback)
  const backupDir = join(process.cwd(), '.update-backup')
  const rollbackDir = join(process.cwd(), '.update-rollback')

  // Clean previous backups
  rmSync(backupDir, { recursive: true, force: true })
  rmSync(rollbackDir, { recursive: true, force: true })

  mkdirSync(backupDir, { recursive: true })
  mkdirSync(rollbackDir, { recursive: true })

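  // Entries with protect: true are backed up now and restored after the copy in Step 7;
  // config and accounts are only allowed to be overwritten when the user opted in above.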
  const filesToProtect = [
    { path: 'src/config.jsonc', protect: !userConfig.autoUpdateConfig },
    { path: 'src/accounts.jsonc', protect: !userConfig.autoUpdateAccounts },
    { path: 'src/accounts.json', protect: !userConfig.autoUpdateAccounts },
    { path: 'sessions', protect: true, isDir: true },
    { path: '.playwright-chromium-installed', protect: true }
  ]

  const backedUp = []
  for (const file of filesToProtect) {
    if (!file.protect) continue
    const srcPath = join(process.cwd(), file.path)
    if (!existsSync(srcPath)) continue

    const destPath = join(backupDir, file.path)
    mkdirSync(dirname(destPath), { recursive: true })

    try {
      if (file.isDir) {
        cpSync(srcPath, destPath, { recursive: true })
      } else {
        writeFileSync(destPath, readFileSync(srcPath))
      }
      backedUp.push(file)
    } catch {
      // Silent failure - continue with update
    }
  }

  // Backup critical files for potential rollback
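  // package.json, package-lock.json and dist/ are enough to bring the previous
  // build back if the install or build step fails after the copy.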
  const criticalFiles = ['package.json', 'package-lock.json', 'dist']
  for (const file of criticalFiles) {
    const srcPath = join(process.cwd(), file)
    if (!existsSync(srcPath)) continue
    const destPath = join(rollbackDir, file)
    try {
      if (statSync(srcPath).isDirectory()) {
        cpSync(srcPath, destPath, { recursive: true })
      } else {
        cpSync(srcPath, destPath)
      }
    } catch {
      // Continue
    }
  }

  // Step 3: Download latest code from GitHub
  process.stdout.write('📥 Downloading...')
  const repoOwner = 'Obsidian-wtf'
  const repoName = 'Microsoft-Rewards-Bot'
  const branch = 'main'
  const archiveUrl = `https://github.com/${repoOwner}/${repoName}/archive/refs/heads/${branch}.zip`

  const archivePath = join(process.cwd(), '.update-download.zip')
  const extractDir = join(process.cwd(), '.update-extract')

  try {
    await downloadFile(archiveUrl, archivePath)
    process.stdout.write(' ✓\n')
  } catch (err) {
    console.log(` ❌\n❌ Download failed: ${err.message}`)
    return 1
  }

  // Step 4: Extract archive
  process.stdout.write('📂 Extracting...')
  rmSync(extractDir, { recursive: true, force: true })
  mkdirSync(extractDir, { recursive: true })

  try {
    await extractZip(archivePath, extractDir)
    process.stdout.write(' ✓\n')
  } catch (err) {
    console.log(` ❌\n❌ Extraction failed: ${err.message}`)
    return 1
  }

  // Step 5: Find extracted folder
  const extractedItems = readdirSync(extractDir)
  const extractedRepoDir = extractedItems.find(item => item.startsWith(repoName))
  if (!extractedRepoDir) {
    console.log('\n❌ Could not find extracted repository folder')
    return 1
  }

  const sourceDir = join(extractDir, extractedRepoDir)

  // Step 6: Copy files selectively
  process.stdout.write('📦 Updating files...')
  const itemsToUpdate = [
    'src',
    'docs',
    'setup',
    'public',
    'tests',
    'package.json',
    'package-lock.json',
    'tsconfig.json',
    'Dockerfile',
    'compose.yaml',
    'entrypoint.sh',
    'run.sh',
    'README.md',
    'LICENSE'
  ]

  for (const item of itemsToUpdate) {
    const srcPath = join(sourceDir, item)
    const destPath = join(process.cwd(), item)

    if (!existsSync(srcPath)) continue

    // Skip protected items
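    // Protected files that live inside updated folders (e.g., src/config.jsonc under src/)
    // are still overwritten by the wholesale copy below and are restored from backup in Step 7.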
    const isProtected = backedUp.some(f => f.path === item || destPath.includes(f.path))
    if (isProtected) continue

    try {
      if (existsSync(destPath)) {
        rmSync(destPath, { recursive: true, force: true })
      }

      if (statSync(srcPath).isDirectory()) {
        cpSync(srcPath, destPath, { recursive: true })
      } else {
        cpSync(srcPath, destPath)
      }
    } catch {
      // Silent failure - continue
    }
  }
  process.stdout.write(' ✓\n')

  // Step 7: Restore protected files (silent)
  if (backedUp.length > 0) {
    for (const file of backedUp) {
      const backupPath = join(backupDir, file.path)
      if (!existsSync(backupPath)) continue

      const destPath = join(process.cwd(), file.path)
      mkdirSync(dirname(destPath), { recursive: true })

      try {
        if (file.isDir) {
          rmSync(destPath, { recursive: true, force: true })
          cpSync(backupPath, destPath, { recursive: true })
        } else {
          writeFileSync(destPath, readFileSync(backupPath))
        }
      } catch {
        // Silent failure
      }
    }
  }

  // Step 8: Cleanup temporary files (silent)
  rmSync(archivePath, { force: true })
  rmSync(extractDir, { recursive: true, force: true })
  rmSync(backupDir, { recursive: true, force: true })

  // Step 9: Create update marker for bot restart detection
  const updateMarkerPath = join(process.cwd(), '.update-happened')
  writeFileSync(updateMarkerPath, JSON.stringify({
    timestamp: new Date().toISOString(),
    fromVersion: versionCheck.localVersion,
    toVersion: versionCheck.remoteVersion,
    method: 'github-api'
  }, null, 2))

  // Step 10: Install dependencies & rebuild
  const hasNpm = await which('npm')
  if (!hasNpm) {
    console.log('⚠️ npm not found - please run: npm install && npm run build')
    return 0
  }

  process.stdout.write('📦 Installing dependencies...')
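  // 'npm ci' gives a clean, lockfile-exact install; if it fails (e.g., lockfile out of
  // sync with package.json) the slower but more forgiving 'npm install' is used instead.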
  const installCode = await run('npm', ['ci', '--silent'], { stdio: 'ignore' })
  if (installCode !== 0) {
    await run('npm', ['install', '--silent'], { stdio: 'ignore' })
  }
  process.stdout.write(' ✓\n')

  process.stdout.write('🔨 Building project...')
  const buildCode = await run('npm', ['run', 'build'], { stdio: 'ignore' })

  if (buildCode !== 0) {
    // Build failed - rollback
    process.stdout.write(' ❌\n')
    console.log('⚠️ Build failed, rolling back to previous version...')

    // Restore from rollback
    for (const file of criticalFiles) {
      const srcPath = join(rollbackDir, file)
      const destPath = join(process.cwd(), file)
      if (!existsSync(srcPath)) continue
      try {
        rmSync(destPath, { recursive: true, force: true })
        if (statSync(srcPath).isDirectory()) {
          cpSync(srcPath, destPath, { recursive: true })
        } else {
          cpSync(srcPath, destPath)
        }
      } catch {
        // Continue
      }
    }

    console.log('✅ Rollback complete - using previous version')
    rmSync(rollbackDir, { recursive: true, force: true })
    return 1
  }

  process.stdout.write(' ✓\n')

  // Step 11: Verify integrity (check if critical files exist)
  process.stdout.write('🔍 Verifying integrity...')
  const criticalPaths = [
    'dist/index.js',
    'package.json',
    'src/index.ts'
  ]

  let integrityOk = true
  for (const path of criticalPaths) {
    if (!existsSync(join(process.cwd(), path))) {
      integrityOk = false
      break
    }
  }

  if (!integrityOk) {
    process.stdout.write(' ❌\n')
    console.log('⚠️ Integrity check failed, rolling back...')

    // Restore from rollback
    for (const file of criticalFiles) {
      const srcPath = join(rollbackDir, file)
      const destPath = join(process.cwd(), file)
      if (!existsSync(srcPath)) continue
      try {
        rmSync(destPath, { recursive: true, force: true })
        if (statSync(srcPath).isDirectory()) {
          cpSync(srcPath, destPath, { recursive: true })
        } else {
          cpSync(srcPath, destPath)
        }
      } catch {
        // Continue
      }
    }

    console.log('✅ Rollback complete - using previous version')
    rmSync(rollbackDir, { recursive: true, force: true })
    return 1
  }

  process.stdout.write(' ✓\n')

  // Clean rollback backup on success
  rmSync(rollbackDir, { recursive: true, force: true })

  console.log(`\n✅ Updated successfully! (${versionCheck.localVersion} → ${versionCheck.remoteVersion})`)

  // Different behavior for Docker vs Host
  if (updateMode === 'docker') {
    console.log('🐳 Docker mode: Update complete')
    console.log(' Container will restart automatically if configured\n')
    // In Docker, don't restart - let orchestrator handle it
    // Just exit cleanly so Docker can restart the container
    return 0
  } else {
    console.log('🔄 Restarting in same process...\n')
    // In host mode, signal restart needed
    return 0
  }
}

// =============================================================================
// ENTRY POINT
// =============================================================================

/**
 * Cleanup temporary files
 */
function cleanup() {
  const tempDirs = [
    '.update-backup',
    '.update-rollback',
    '.update-extract',
    '.update-download.zip'
  ]

  for (const dir of tempDirs) {
    const path = join(process.cwd(), dir)
    try {
      if (existsSync(path)) {
        if (statSync(path).isDirectory()) {
          rmSync(path, { recursive: true, force: true })
        } else {
          rmSync(path, { force: true })
        }
      }
    } catch {
      // Ignore cleanup errors
    }
  }
}

async function main() {
  // Check if updates are enabled in config
  const configData = readJsonConfig([
    'src/config.jsonc',
    'config.jsonc',
    'src/config.json',
    'config.json'
  ])

  if (configData?.update?.enabled === false) {
    console.log('\n⚠️ Updates are disabled in config.jsonc')
    console.log('To enable: set "update.enabled" to true in src/config.jsonc\n')
    return 0
  }

  // Global timeout: 5 minutes max
  const timeout = setTimeout(() => {
    console.error('\n⏱️ Update timeout (5 min) - cleaning up...')
    cleanup()
    process.exit(1)
  }, 5 * 60 * 1000)

  try {
    const code = await performUpdate()
    clearTimeout(timeout)

    // Final cleanup of temporary files
    cleanup()

    process.exit(code)
  } catch (err) {
    clearTimeout(timeout)
    cleanup()
    throw err
  }
}

main().catch((err) => {
  console.error('\n❌ Update failed with error:', err)
  console.error('\nCleaning up temporary files...')
  cleanup()
  process.exit(1)
})