mirror of https://github.com/LightZirconite/Microsoft-Rewards-Bot.git
synced 2026-01-09 09:06:15 +00:00

New structure 2

.github/copilot-instructions.md (vendored): 17 changes
@@ -95,17 +95,19 @@ docker/ # Docker deployment files
 ├── entrypoint.sh # Container initialization script
 ├── run_daily.sh # Daily execution wrapper (cron)
 └── crontab.template # Cron schedule template
-scripts/ # Utility scripts
-└── run.sh # Nix development environment launcher
-setup/
+scripts/ # Automation scripts
+└── installer/ # Setup and update automation
+├── setup.mjs # Initial setup automation
+├── update.mjs # GitHub ZIP-based auto-updater (NO GIT REQUIRED!)
+└── README.md # Installer documentation
+setup/ # Setup and execution scripts
 ├── setup.bat # Windows setup script
 ├── setup.sh # Linux/Mac setup script
+├── run.sh # Nix development environment launcher
 ├── nix/ # NixOS configuration
 │ ├── flake.nix # Nix flake definition
 │ └── flake.lock # Nix flake lock file
-└── update/
-├── setup.mjs # Initial setup automation
-└── update.mjs # GitHub ZIP-based auto-updater (NO GIT REQUIRED!)
+└── README.md # Setup guide
 ```
 
 ---
@@ -1115,10 +1117,11 @@ private combinedDeduplication(queries: string[], threshold = 0.65): string[] {
 - **Methods:** `generateEmail()` (8 realistic patterns), `generatePassword()` (14-18 chars), `generateBirthdate()` (age 20-45), `generateNames()` (extracts from email)
 - **Pattern:** Uses nameDatabase.ts with 100+ first/last names
 
-### Auto-Update System (`setup/update/update.mjs`)
+### Auto-Update System (`scripts/installer/update.mjs`)
 
 **update.mjs (600+ LINES - CRITICAL FEATURE):**
 - **Purpose:** Git-free update system using GitHub ZIP downloads (NO merge conflicts!)
+- **Location:** `scripts/installer/update.mjs` (moved from `setup/update/`)
 - **Features:** Version comparison (cache-busting), GitHub API ZIP download, selective file preservation, automatic rollback on build failure, integrity checks, Docker vs Host detection, dependency installation, TypeScript rebuild verification, update marker creation
 - **Protected Files:** `src/config.jsonc`, `src/accounts.jsonc`, `sessions/`, `.playwright-chromium-installed`
 - **Workflow:** Check version → Create backups → Download ZIP → Extract → Selective copy → Restore protected → npm ci → npm install → npm build → Verify integrity → Create marker → Clean temp
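The bullets at the top of the hunk above document the account-data generator (`generateEmail()`, `generatePassword()`, `generateBirthdate()`, `generateNames()` backed by `nameDatabase.ts`), but its code is not part of this commit. A minimal sketch of the two numeric constraints they state (14-18 character passwords, ages 20-45); the character set and helper names are illustrative assumptions, not the bot's actual implementation:

```js
// Illustrative sketch only: the real generators live in the bot's account module.
import { randomInt } from 'node:crypto'

// generatePassword(): random length between 14 and 18 characters (assumed charset)
function generatePassword() {
    const chars = 'ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz23456789!@#$%'
    const length = randomInt(14, 19) // 14-18 inclusive
    let out = ''
    for (let i = 0; i < length; i++) {
        out += chars[randomInt(chars.length)]
    }
    return out
}

// generateBirthdate(): a date that puts the account roughly 20-45 years old
function generateBirthdate() {
    const age = randomInt(20, 46) // 20-45 inclusive
    const year = new Date().getFullYear() - age
    return new Date(year, randomInt(0, 12), randomInt(1, 29))
}

console.log(generatePassword(), generateBirthdate().toISOString().slice(0, 10))
```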
@@ -30,7 +30,7 @@
 "dashboard": "node --enable-source-maps ./dist/index.js -dashboard",
 "dashboard-dev": "ts-node ./src/index.ts -dashboard",
 "lint": "eslint \"src/**/*.{ts,tsx}\"",
-"setup": "node ./setup/update/setup.mjs",
+"setup": "node ./scripts/installer/setup.mjs",
 "kill-chrome-win": "powershell -Command \"Get-Process | Where-Object { $_.MainModule.FileVersionInfo.FileDescription -eq 'Google Chrome for Testing' } | ForEach-Object { Stop-Process -Id $_.Id -Force }\"",
 "create-docker": "docker build -t microsoft-rewards-bot ."
 },
@@ -80,4 +80,4 @@
 "ts-node": "^10.9.2",
 "ws": "^8.18.3"
 }
 }
@@ -1,22 +1,32 @@
 # Scripts Directory
 
-This directory contains utility scripts for development and deployment.
+This directory contains automation scripts for the Microsoft Rewards Bot.
 
 ## Available Scripts
 
-### `run.sh`
-**Purpose:** Nix development environment launcher
-**Usage:** `./run.sh`
-**Description:** Launches the bot using Nix develop environment with xvfb-run for headless browser support.
+### `installer/`
+**Purpose:** Automated setup and update scripts
+**Description:** Contains Node.js scripts for initial setup and GitHub-based updates.
 
-**Requirements:**
-- Nix package manager
-- xvfb (X Virtual Framebuffer)
+#### `installer/setup.mjs`
+**Purpose:** Initial project setup automation
+**Usage:** `npm run setup` or `node scripts/installer/setup.mjs`
+**Description:** Automates initial project configuration, dependency installation, and first-time setup.
 
-**Environment:**
-This script is designed for NixOS or systems with Nix installed. It provides a reproducible development environment as defined in `setup/nix/flake.nix`.
+#### `installer/update.mjs`
+**Purpose:** GitHub ZIP-based auto-updater
+**Usage:** `node scripts/installer/update.mjs`
+**Description:** Downloads and applies updates from GitHub without Git. Preserves user configuration files (`accounts.jsonc`, `config.jsonc`, `sessions/`).
 
+**Features:**
+- No Git required
+- No merge conflicts
+- Selective file preservation
+- Automatic rollback on failure
+- Dependency installation
+- TypeScript rebuild
+
 ---
 
 For Docker deployment, see the `docker/` directory.
-For setup scripts, see the `setup/` directory.
+For shell scripts (setup.bat, setup.sh, run.sh), see the `setup/` directory.
@@ -51,7 +51,7 @@ These files are **conditionally protected** (based on config):
 Run the update manually:
 
 ```bash
-node setup/update/update.mjs
+node scripts/installer/update.mjs
 ```
 
 ## Update Detection
@@ -95,8 +95,9 @@ If no updates are available, **no marker is created** and the bot won't restart.
 
 ### File Structure
 ```
-setup/update/
-├── update.mjs # Main update script (468 lines)
+scripts/installer/
+├── update.mjs # Main update script (auto-updater)
+├── setup.mjs # Initial setup wizard
 └── README.md # This file
 ```
 
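The hunk above relocates the updater's own README; the `.update-happened` marker it documents is what tells the bot an update actually ran (no update, no marker, no restart). A minimal sketch of how that marker could be consumed on startup, assuming only the fields visible later in this diff (`toVersion`, `method`); the bot's real detection code is not shown here:

```js
// Illustrative sketch: read and consume the update marker written by update.mjs.
import { existsSync, readFileSync, rmSync } from 'node:fs'
import { join } from 'node:path'

function consumeUpdateMarker(cwd = process.cwd()) {
    const markerPath = join(cwd, '.update-happened')
    if (!existsSync(markerPath)) {
        return null // no marker: nothing was updated, keep running as-is
    }
    const marker = JSON.parse(readFileSync(markerPath, 'utf8'))
    rmSync(markerPath, { force: true }) // consume it so the next run starts clean
    return marker // e.g. { toVersion: '1.2.3', method: 'github-api', ... }
}

const marker = consumeUpdateMarker()
if (marker) {
    console.log(`Updated to v${marker.toVersion} via ${marker.method}; restarting is recommended.`)
}
```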
@@ -13,7 +13,7 @@
 * - TypeScript rebuild
 *
 * Usage:
-* node setup/update/update.mjs # Run update
+* node scripts/installer/update.mjs # Run update
 * npm run start # Bot runs this automatically if enabled
 */
 
@@ -67,7 +67,7 @@ function stripJsonComments(input) {
 continue
 }
 
-if (char === '"' || char === "'") {
+if (char === '"' || char === '\'') {
 inString = true
 stringChar = char
 result += char
@@ -113,10 +113,10 @@ function readJsonConfig(preferredPaths) {
 */
 function run(cmd, args, opts = {}) {
 return new Promise((resolve) => {
 const child = spawn(cmd, args, {
 stdio: 'inherit',
 shell: process.platform === 'win32',
 ...opts
 })
 child.on('close', (code) => resolve(code ?? 0))
 child.on('error', () => resolve(1))
@@ -138,7 +138,7 @@ async function which(cmd) {
 function downloadFile(url, dest) {
 return new Promise((resolve, reject) => {
 const file = createWriteStream(dest)
 
 httpsGet(url, (response) => {
 // Handle redirects
 if (response.statusCode === 302 || response.statusCode === 301) {
@@ -147,14 +147,14 @@ function downloadFile(url, dest) {
 downloadFile(response.headers.location, dest).then(resolve).catch(reject)
 return
 }
 
 if (response.statusCode !== 200) {
 file.close()
 rmSync(dest, { force: true })
 reject(new Error(`HTTP ${response.statusCode}: ${response.statusMessage}`))
 return
 }
 
 response.pipe(file)
 file.on('finish', () => {
 file.close()
@@ -177,22 +177,22 @@ async function extractZip(zipPath, destDir) {
 const code = await run('unzip', ['-q', '-o', zipPath, '-d', destDir], { stdio: 'ignore' })
 if (code === 0) return
 }
 
 // Try tar (modern Windows/Unix)
 if (await which('tar')) {
 const code = await run('tar', ['-xf', zipPath, '-C', destDir], { stdio: 'ignore' })
 if (code === 0) return
 }
 
 // Try PowerShell Expand-Archive (Windows)
 if (process.platform === 'win32') {
 const code = await run('powershell', [
 '-Command',
 `Expand-Archive -Path "${zipPath}" -DestinationPath "${destDir}" -Force`
 ], { stdio: 'ignore' })
 if (code === 0) return
 }
 
 throw new Error('No extraction tool found (unzip, tar, or PowerShell required)')
 }
 
@@ -210,7 +210,7 @@ function isDocker() {
 if (existsSync('/.dockerenv')) {
 return true
 }
 
 // Method 2: Check /proc/1/cgroup for docker
 if (existsSync('/proc/1/cgroup')) {
 const cgroupContent = readFileSync('/proc/1/cgroup', 'utf8')
@@ -218,14 +218,14 @@ function isDocker() {
 return true
 }
 }
 
 // Method 3: Check environment variables
 if (process.env.DOCKER === 'true' ||
 process.env.CONTAINER === 'docker' ||
 process.env.KUBERNETES_SERVICE_HOST) {
 return true
 }
 
 // Method 4: Check /proc/self/mountinfo for overlay filesystem
 if (existsSync('/proc/self/mountinfo')) {
 const mountinfo = readFileSync('/proc/self/mountinfo', 'utf8')
@@ -233,7 +233,7 @@ function isDocker() {
 return true
 }
 }
 
 return false
 } catch {
 // If any error occurs (e.g., on Windows), assume not Docker
@@ -246,15 +246,15 @@ function isDocker() {
 */
 function getUpdateMode(configData) {
 const dockerMode = configData?.update?.dockerMode || 'auto'
 
 if (dockerMode === 'force-docker') {
 return 'docker'
 }
 
 if (dockerMode === 'force-host') {
 return 'host'
 }
 
 // Auto-detect
 return isDocker() ? 'docker' : 'host'
 }
@@ -275,22 +275,22 @@ async function checkVersion() {
 console.log('⚠️ Could not find local package.json')
 return { updateAvailable: false, localVersion: 'unknown', remoteVersion: 'unknown' }
 }
 
 const localPkg = JSON.parse(readFileSync(localPkgPath, 'utf8'))
 const localVersion = localPkg.version
 
 // Fetch remote version from GitHub
 const repoOwner = 'Obsidian-wtf'
 const repoName = 'Microsoft-Rewards-Bot'
 const branch = 'main'
 
 // Add cache-buster to prevent GitHub from serving stale cached version
 const cacheBuster = Date.now()
 const pkgUrl = `https://raw.githubusercontent.com/${repoOwner}/${repoName}/refs/heads/${branch}/package.json?cb=${cacheBuster}`
 
 console.log('🔍 Checking for updates...')
 console.log(` Local: ${localVersion}`)
 
 return new Promise((resolve) => {
 // Request with cache-busting headers
 const options = {
@@ -301,14 +301,14 @@ async function checkVersion() {
 'User-Agent': 'Microsoft-Rewards-Bot-Updater'
 }
 }
 
 const request = httpsGet(pkgUrl, options, (res) => {
 if (res.statusCode !== 200) {
 console.log(` ⚠️ Could not check remote version (HTTP ${res.statusCode})`)
 resolve({ updateAvailable: false, localVersion, remoteVersion: 'unknown' })
 return
 }
 
 let data = ''
 res.on('data', chunk => data += chunk)
 res.on('end', () => {
@@ -316,7 +316,7 @@ async function checkVersion() {
 const remotePkg = JSON.parse(data)
 const remoteVersion = remotePkg.version
 console.log(` Remote: ${remoteVersion}`)
 
 // Any difference triggers update (upgrade or downgrade)
 const updateAvailable = localVersion !== remoteVersion
 resolve({ updateAvailable, localVersion, remoteVersion })
@@ -326,13 +326,13 @@ async function checkVersion() {
 }
 })
 })
 
 // Timeout after 10 seconds
 request.on('error', (err) => {
 console.log(` ⚠️ Network error: ${err.message}`)
 resolve({ updateAvailable: false, localVersion, remoteVersion: 'unknown' })
 })
 
 request.setTimeout(10000, () => {
 request.destroy()
 console.log(' ⚠️ Request timeout')
@@ -351,12 +351,12 @@ async function checkVersion() {
 async function performUpdate() {
 // Step 0: Check if update is needed by comparing versions
 const versionCheck = await checkVersion()
 
 if (!versionCheck.updateAvailable) {
 console.log(`✅ Already up to date (v${versionCheck.localVersion})`)
 return 0 // Exit without creating update marker
 }
 
 // Step 0.5: Detect environment and determine update mode
 const configData = readJsonConfig([
 'src/config.jsonc',
@@ -364,31 +364,31 @@ async function performUpdate() {
 'src/config.json',
 'config.json'
 ])
 
 const updateMode = getUpdateMode(configData)
 const envIcon = updateMode === 'docker' ? '🐳' : '💻'
 
 console.log(`\n📦 Update available: ${versionCheck.localVersion} → ${versionCheck.remoteVersion}`)
 console.log(`${envIcon} Environment: ${updateMode === 'docker' ? 'Docker container' : 'Host system'}`)
 console.log('⏳ Updating... (this may take a moment)\n')
 
 // Step 1: Read user preferences (silent)
 const userConfig = {
 autoUpdateConfig: configData?.update?.autoUpdateConfig ?? false,
 autoUpdateAccounts: configData?.update?.autoUpdateAccounts ?? false
 }
 
 // Step 2: Create backups (protected files + critical for rollback)
 const backupDir = join(process.cwd(), '.update-backup')
 const rollbackDir = join(process.cwd(), '.update-rollback')
 
 // Clean previous backups
 rmSync(backupDir, { recursive: true, force: true })
 rmSync(rollbackDir, { recursive: true, force: true })
 
 mkdirSync(backupDir, { recursive: true })
 mkdirSync(rollbackDir, { recursive: true })
 
 const filesToProtect = [
 { path: 'src/config.jsonc', protect: !userConfig.autoUpdateConfig },
 { path: 'src/accounts.jsonc', protect: !userConfig.autoUpdateAccounts },
@@ -396,16 +396,16 @@ async function performUpdate() {
 { path: 'sessions', protect: true, isDir: true },
 { path: '.playwright-chromium-installed', protect: true }
 ]
 
 const backedUp = []
 for (const file of filesToProtect) {
 if (!file.protect) continue
 const srcPath = join(process.cwd(), file.path)
 if (!existsSync(srcPath)) continue
 
 const destPath = join(backupDir, file.path)
 mkdirSync(dirname(destPath), { recursive: true })
 
 try {
 if (file.isDir) {
 cpSync(srcPath, destPath, { recursive: true })
@@ -417,7 +417,7 @@ async function performUpdate() {
 // Silent failure - continue with update
 }
 }
 
 // Backup critical files for potential rollback
 const criticalFiles = ['package.json', 'package-lock.json', 'dist']
 for (const file of criticalFiles) {
@@ -434,17 +434,17 @@ async function performUpdate() {
 // Continue
 }
 }
 
 // Step 3: Download latest code from GitHub
 process.stdout.write('📥 Downloading...')
 const repoOwner = 'Obsidian-wtf'
 const repoName = 'Microsoft-Rewards-Bot'
 const branch = 'main'
 const archiveUrl = `https://github.com/${repoOwner}/${repoName}/archive/refs/heads/${branch}.zip`
 
 const archivePath = join(process.cwd(), '.update-download.zip')
 const extractDir = join(process.cwd(), '.update-extract')
 
 try {
 await downloadFile(archiveUrl, archivePath)
 process.stdout.write(' ✓\n')
@@ -452,12 +452,12 @@ async function performUpdate() {
 console.log(` ❌\n❌ Download failed: ${err.message}`)
 return 1
 }
 
 // Step 4: Extract archive
 process.stdout.write('📂 Extracting...')
 rmSync(extractDir, { recursive: true, force: true })
 mkdirSync(extractDir, { recursive: true })
 
 try {
 await extractZip(archivePath, extractDir)
 process.stdout.write(' ✓\n')
@@ -465,7 +465,7 @@ async function performUpdate() {
 console.log(` ❌\n❌ Extraction failed: ${err.message}`)
 return 1
 }
 
 // Step 5: Find extracted folder
 const extractedItems = readdirSync(extractDir)
 const extractedRepoDir = extractedItems.find(item => item.startsWith(repoName))
@@ -473,9 +473,9 @@ async function performUpdate() {
 console.log('\n❌ Could not find extracted repository folder')
 return 1
 }
 
 const sourceDir = join(extractDir, extractedRepoDir)
 
 // Step 6: Copy files selectively
 process.stdout.write('📦 Updating files...')
 const itemsToUpdate = [
@@ -494,22 +494,22 @@ async function performUpdate() {
 'README.md',
 'LICENSE'
 ]
 
 for (const item of itemsToUpdate) {
 const srcPath = join(sourceDir, item)
 const destPath = join(process.cwd(), item)
 
 if (!existsSync(srcPath)) continue
 
 // Skip protected items
 const isProtected = backedUp.some(f => f.path === item || destPath.includes(f.path))
 if (isProtected) continue
 
 try {
 if (existsSync(destPath)) {
 rmSync(destPath, { recursive: true, force: true })
 }
 
 if (statSync(srcPath).isDirectory()) {
 cpSync(srcPath, destPath, { recursive: true })
 } else {
@@ -520,16 +520,16 @@ async function performUpdate() {
 }
 }
 process.stdout.write(' ✓\n')
 
 // Step 7: Restore protected files (silent)
 if (backedUp.length > 0) {
 for (const file of backedUp) {
 const backupPath = join(backupDir, file.path)
 if (!existsSync(backupPath)) continue
 
 const destPath = join(process.cwd(), file.path)
 mkdirSync(dirname(destPath), { recursive: true })
 
 try {
 if (file.isDir) {
 rmSync(destPath, { recursive: true, force: true })
@@ -542,12 +542,12 @@ async function performUpdate() {
 }
 }
 }
 
 // Step 8: Cleanup temporary files (silent)
 rmSync(archivePath, { force: true })
 rmSync(extractDir, { recursive: true, force: true })
 rmSync(backupDir, { recursive: true, force: true })
 
 // Step 9: Create update marker for bot restart detection
 const updateMarkerPath = join(process.cwd(), '.update-happened')
 writeFileSync(updateMarkerPath, JSON.stringify({
@@ -556,7 +556,7 @@ async function performUpdate() {
 toVersion: versionCheck.remoteVersion,
 method: 'github-api'
 }, null, 2))
 
 // Step 10: Install dependencies & rebuild
 const hasNpm = await which('npm')
 if (!hasNpm) {
@@ -570,15 +570,15 @@ async function performUpdate() {
 await run('npm', ['install', '--silent'], { stdio: 'ignore' })
 }
 process.stdout.write(' ✓\n')
 
 process.stdout.write('🔨 Building project...')
 const buildCode = await run('npm', ['run', 'build'], { stdio: 'ignore' })
 
 if (buildCode !== 0) {
 // Build failed - rollback
 process.stdout.write(' ❌\n')
 console.log('⚠️ Build failed, rolling back to previous version...')
 
 // Restore from rollback
 for (const file of criticalFiles) {
 const srcPath = join(rollbackDir, file)
@@ -595,14 +595,14 @@ async function performUpdate() {
 // Continue
 }
 }
 
 console.log('✅ Rollback complete - using previous version')
 rmSync(rollbackDir, { recursive: true, force: true })
 return 1
 }
 
 process.stdout.write(' ✓\n')
 
 // Step 11: Verify integrity (check if critical files exist)
 process.stdout.write('🔍 Verifying integrity...')
 const criticalPaths = [
@@ -610,7 +610,7 @@ async function performUpdate() {
 'package.json',
 'src/index.ts'
 ]
 
 let integrityOk = true
 for (const path of criticalPaths) {
 if (!existsSync(join(process.cwd(), path))) {
@@ -618,11 +618,11 @@ async function performUpdate() {
 break
 }
 }
 
 if (!integrityOk) {
 process.stdout.write(' ❌\n')
 console.log('⚠️ Integrity check failed, rolling back...')
 
 // Restore from rollback
 for (const file of criticalFiles) {
 const srcPath = join(rollbackDir, file)
@@ -639,19 +639,19 @@ async function performUpdate() {
 // Continue
 }
 }
 
 console.log('✅ Rollback complete - using previous version')
 rmSync(rollbackDir, { recursive: true, force: true })
 return 1
 }
 
 process.stdout.write(' ✓\n')
 
 // Clean rollback backup on success
 rmSync(rollbackDir, { recursive: true, force: true })
 
 console.log(`\n✅ Updated successfully! (${versionCheck.localVersion} → ${versionCheck.remoteVersion})`)
 
 // Different behavior for Docker vs Host
 if (updateMode === 'docker') {
 console.log('<27> Docker mode: Update complete')
@@ -676,11 +676,11 @@ async function performUpdate() {
 function cleanup() {
 const tempDirs = [
 '.update-backup',
 '.update-rollback',
 '.update-extract',
 '.update-download.zip'
 ]
 
 for (const dir of tempDirs) {
 const path = join(process.cwd(), dir)
 try {
@@ -705,7 +705,7 @@ async function main() {
 'src/config.json',
 'config.json'
 ])
 
 if (configData?.update?.enabled === false) {
 console.log('\n⚠️ Updates are disabled in config.jsonc')
 console.log('To enable: set "update.enabled" to true in src/config.jsonc\n')
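Taken together, the hunks above and below read a handful of keys from the user's config: `update.enabled` (startup check on/off), `update.dockerMode` (`'auto'`, `'force-docker'`, `'force-host'`), `update.autoUpdateConfig` and `update.autoUpdateAccounts` (whether `src/config.jsonc` / `src/accounts.jsonc` may be overwritten), and `update.scriptPath` (used in the `src/index.ts` hunk at the end). A hedged sketch of that block's shape; only the key names come from this diff, the values shown are examples:

```js
// Illustrative only: the `update` block as update.mjs and the bot read it via
// readJsonConfig(). Defaults and comments here are examples, not project docs.
const exampleUpdateConfig = {
    update: {
        enabled: true,                              // false disables the startup update check
        dockerMode: 'auto',                         // 'auto' | 'force-docker' | 'force-host'
        autoUpdateConfig: false,                    // true lets updates overwrite src/config.jsonc
        autoUpdateAccounts: false,                  // true lets updates overwrite src/accounts.jsonc
        scriptPath: 'scripts/installer/update.mjs'  // where the bot looks for the updater
    }
}
```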
@@ -722,10 +722,10 @@ async function main() {
 try {
 const code = await performUpdate()
 clearTimeout(timeout)
 
 // Final cleanup of temporary files
 cleanup()
 
 process.exit(code)
 } catch (err) {
 clearTimeout(timeout)
@@ -41,15 +41,12 @@ npm start
 **Usage:**
 ```bash
 # Run update manually
-node setup/update/update.mjs
+node scripts/installer/update.mjs
 ```
 
 **Automatic updates:** The bot checks for updates on startup (controlled by `update.enabled` in config.jsonc).
 
-### update/setup.mjs
-**Interactive setup wizard** used by setup.bat/setup.sh.
-
-This is typically not run directly - use the wrapper scripts instead.
+**Note:** Installer scripts have been moved to `scripts/installer/` directory. See `scripts/README.md` for details.
 
 ## Quick Start Guide
 
@@ -751,7 +751,7 @@ export class MicrosoftRewardsBot {
 return 0
 }
 
-const scriptRel = upd.scriptPath || 'setup/update/update.mjs'
+const scriptRel = upd.scriptPath || 'scripts/installer/update.mjs'
 const scriptAbs = path.join(process.cwd(), scriptRel)
 
 if (!fs.existsSync(scriptAbs)) {
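The final hunk only shows how the bot resolves the updater path from `update.scriptPath` and checks that the file exists; the actual launch is outside this diff. One way such a launch could look, reusing the resolution logic above (the bot's real invocation, arguments, and error handling are assumptions here):

```js
// Illustrative sketch: resolve the configured updater script and run it with Node.
import { spawn } from 'node:child_process'
import { existsSync } from 'node:fs'
import path from 'node:path'

function runUpdater(updateConfig = {}) {
    const scriptRel = updateConfig.scriptPath || 'scripts/installer/update.mjs'
    const scriptAbs = path.join(process.cwd(), scriptRel)

    if (!existsSync(scriptAbs)) {
        console.log(`Updater not found at ${scriptAbs}, skipping update check`)
        return Promise.resolve(0)
    }

    return new Promise((resolve) => {
        const child = spawn(process.execPath, [scriptAbs], { stdio: 'inherit' })
        child.on('close', (code) => resolve(code ?? 0))
        child.on('error', () => resolve(1))
    })
}

// Example: const exitCode = await runUpdater({ scriptPath: 'scripts/installer/update.mjs' })
```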