Cleanup v2 codebase (#403)

* Remove diagnostics system

* Remove Analytics system

* Remove Buy Mode feature

* Remove Scheduler/heartbeat

* Index: remove printBanner

* Remove auto-update functionality
This commit is contained in:
Simon Gardling
2025-11-03 15:16:38 -05:00
committed by GitHub
parent 8e9c6308d5
commit 169faf2b70
15 changed files with 9 additions and 1366 deletions

View File

@@ -23,8 +23,6 @@
"start": "node --enable-source-maps ./dist/index.js",
"ts-start": "node --loader ts-node/esm ./src/index.ts",
"dev": "ts-node ./src/index.ts -dev",
"ts-schedule": "ts-node ./src/scheduler.ts",
"start:schedule": "node --enable-source-maps ./dist/scheduler.js",
"lint": "eslint \"src/**/*.{ts,tsx}\"",
"prepare": "npm run build",
"setup": "node ./setup/update/setup.mjs",

View File

@@ -42,13 +42,6 @@ class Browser {
const legacyHeadless = (this.bot.config as { headless?: boolean }).headless
const nestedHeadless = (this.bot.config.browser as { headless?: boolean } | undefined)?.headless
let headlessValue = envForceHeadless ? true : (legacyHeadless ?? nestedHeadless ?? false)
if (this.bot.isBuyModeEnabled() && !envForceHeadless) {
if (headlessValue !== false) {
const target = this.bot.getBuyModeTarget()
this.bot.log(this.bot.isMobile, 'BROWSER', `Buy mode detected${target ? ` for ${target}` : ''}; forcing headless=false so captchas and manual flows remain interactive.`, 'warn')
}
headlessValue = false
}
const headless: boolean = Boolean(headlessValue)
const engineName = 'chromium' // current hard-coded engine

View File

@@ -158,9 +158,6 @@ export default class BrowserFunc {
if (!scriptContent) {
this.bot.log(this.bot.isMobile, 'GET-DASHBOARD-DATA', 'Dashboard script not found on first try, attempting recovery', 'warn')
await this.bot.browser.utils.captureDiagnostics(target, 'dashboard-data-missing').catch((e) => {
this.bot.log(this.bot.isMobile, 'GET-DASHBOARD-DATA', `Failed to capture diagnostics: ${e}`, 'warn')
})
// Force a navigation retry once before failing hard
try {
@@ -222,9 +219,6 @@ export default class BrowserFunc {
// Log a snippet of the script content for debugging
const scriptPreview = scriptContent.substring(0, 200)
this.bot.log(this.bot.isMobile, 'GET-DASHBOARD-DATA', `Script preview: ${scriptPreview}`, 'warn')
await this.bot.browser.utils.captureDiagnostics(target, 'dashboard-data-parse').catch((e) => {
this.bot.log(this.bot.isMobile, 'GET-DASHBOARD-DATA', `Failed to capture diagnostics: ${e}`, 'warn')
})
throw this.bot.log(this.bot.isMobile, 'GET-DASHBOARD-DATA', 'Unable to parse dashboard script', 'error')
}

View File

@@ -2,7 +2,6 @@ import { Page } from 'rebrowser-playwright'
import { load } from 'cheerio'
import { MicrosoftRewardsBot } from '../index'
import { captureDiagnostics as captureSharedDiagnostics } from '../util/Diagnostics'
type DismissButton = { selector: string; label: string; isXPath?: boolean }
@@ -217,12 +216,4 @@ export default class BrowserUtil {
} catch { /* swallow */ }
}
/**
* Capture minimal diagnostics for a page: screenshot + HTML content.
* Files are written under ./reports/<date>/ with a safe label.
* Thin delegate to the shared implementation in util/Diagnostics.
* @param page  page to capture
* @param label short label used to build the output filename
*/
async captureDiagnostics(page: Page, label: string): Promise<void> {
await captureSharedDiagnostics(this.bot, page, label)
}
}

View File

@@ -50,20 +50,6 @@
"passesPerRun": 3
},
"schedule": {
// Built-in scheduler (no cron needed in containers)
"enabled": false,
// Time format options:
// - US style with AM/PM → useAmPm: true and time12 (e.g., "9:00 AM")
// - 24-hour style → useAmPm: false and time24 (e.g., "09:00")
"useAmPm": false,
"time12": "9:00 AM",
"time24": "09:00",
// IANA timezone (e.g., "Europe/Paris", "America/New_York"); see schedule.md for the full list
"timeZone": "Europe/Paris",
// If true, run immediately on process start
"runImmediatelyOnStart": false
},
"jobState": {
// Save state to avoid duplicate work across restarts
@@ -250,94 +236,6 @@
],
// Email redaction toggle (true = secure, false = full emails)
"redactEmails": true
},
"diagnostics": {
// Capture minimal evidence on failures (screenshots/HTML)
"enabled": true,
"saveScreenshot": true,
"saveHtml": true,
"maxPerRun": 2,
"retentionDays": 7
},
"analytics": {
// 📈 Performance Dashboard: tracks points earned, success rates, execution times
// Useful for monitoring your stats over time. Disable if you don't need it.
// WHAT IT DOES:
// - Collects daily/weekly/monthly statistics
// - Calculates success rates for each activity type
// - Tracks average execution times
// - Generates trend reports
// - Can export to Markdown or send via webhook
"enabled": true,
// How long to keep analytics data (days)
"retentionDays": 30,
// Generate markdown summary reports
"exportMarkdown": true,
// Send analytics summary via webhook
"webhookSummary": true
},
// ============================================================
// 🛒 BUY MODE
// ============================================================
"buyMode": {
// Manual purchase/redeem mode. Use CLI -buy to enable
// Session duration cap in minutes
"maxMinutes": 45
},
// ============================================================
// 🔄 UPDATES
// ============================================================
"update": {
// Post-run auto-update settings
"git": true,
"docker": false,
// Custom updater script path (relative to repo root)
"scriptPath": "setup/update/update.mjs",
// ⚠️ SMART UPDATE CONTROL - How It Really Works:
//
// BACKUP: Your files are ALWAYS backed up to .update-backup/ before any update
//
// UPDATE PROCESS:
// 1. Script checks if remote modified config.jsonc or accounts.json
// 2. Runs "git pull --rebase" to merge remote changes
// 3. Git intelligently merges:
// ✅ NEW FIELDS ADDED (new config options, new account properties)
// → Your existing values are PRESERVED, new fields are added alongside
// → This is 95% of updates - works perfectly without conflicts
//
// ⚠️ MAJOR RESTRUCTURING (fields renamed, sections reordered, format changed)
// → Git may choose one version over the other
// → Risk of losing your custom values in restructured sections
//
// WHAT THE OPTIONS DO:
// - true: ACCEPT git merge result (keeps new features + your settings in most cases)
// - false: REJECT remote changes, RESTORE your local file from backup (stay on old version)
//
// RECOMMENDED: Keep both TRUE
// Why? Because we rarely restructure files. Most updates just ADD new optional fields.
// Your passwords, emails, and custom settings survive addition-only updates.
// Only risk: major file restructuring (rare, usually announced in release notes).
//
// SAFETY NET: Check .update-backup/ folder after updates to compare if worried.
// Apply remote updates to config.jsonc via git merge
// true = accept new features + intelligent merge (RECOMMENDED for most users)
// false = always keep your local version (miss new config options)
"autoUpdateConfig": true,
// Apply remote updates to accounts.json via git merge
// true = accept new fields (like "region", "totpSecret") while keeping credentials (RECOMMENDED)
// false = always keep your local accounts file (safest but may miss new optional fields)
"autoUpdateAccounts": true
}
}

View File

@@ -8,7 +8,6 @@ import { AxiosRequestConfig } from 'axios'
import { generateTOTP } from '../util/Totp'
import { saveSessionData } from '../util/Load'
import { MicrosoftRewardsBot } from '../index'
import { captureDiagnostics } from '../util/Diagnostics'
import { OAuth } from '../interface/OAuth'
// -------------------------------
@@ -593,8 +592,7 @@ export class Login {
const fallbackSelector = await this.waitForRewardsRoot(page, 6000)
if (!fallbackSelector) {
await this.bot.browser.utils.captureDiagnostics(page, 'login-portal-missing').catch(()=>{})
throw this.bot.log(this.bot.isMobile, 'LOGIN', 'Portal root element missing after navigation (saved diagnostics to reports/)', 'error')
throw this.bot.log(this.bot.isMobile, 'LOGIN', 'Portal root element missing after navigation', 'error')
}
this.bot.log(this.bot.isMobile, 'LOGIN', `Reached rewards portal via fallback (${fallbackSelector})`)
return
@@ -712,7 +710,6 @@ export class Login {
this.bot.compromisedReason = 'sign-in-blocked'
this.startCompromisedInterval()
await this.bot.engageGlobalStandby('sign-in-blocked', email).catch(()=>{})
await this.saveIncidentArtifacts(page,'sign-in-blocked').catch(()=>{})
// Open security docs for immediate guidance (best-effort)
await this.openDocsTab(page, docsUrl).catch(()=>{})
return true
@@ -823,7 +820,6 @@ export class Login {
this.bot.compromisedReason = 'recovery-mismatch'
this.startCompromisedInterval()
await this.bot.engageGlobalStandby('recovery-mismatch', email).catch(()=>{})
await this.saveIncidentArtifacts(page,'recovery-mismatch').catch(()=>{})
await this.openDocsTab(page, docsUrl).catch(()=>{})
} else {
const mode = observedPrefix.length === 1 ? 'lenient' : 'strict'
@@ -885,9 +881,6 @@ export class Login {
}, 5*60*1000)
}
/**
* Best-effort evidence capture for a security incident (e.g. sign-in-blocked,
* recovery-mismatch). Uses the shared diagnostics helper with scope 'security';
* skipSlot/force suggest it bypasses the per-run capture cap — confirm in util/Diagnostics.
*/
private async saveIncidentArtifacts(page: Page, slug: string) {
await captureDiagnostics(this.bot, page, slug, { scope: 'security', skipSlot: true, force: true })
}
private async openDocsTab(page: Page, url: string) {
try {

View File

@@ -168,7 +168,6 @@ export class Workers {
await this.applyThrottle(throttle, 1200, 2600)
} catch (error) {
await this.bot.browser.utils.captureDiagnostics(activityPage, `activity_error_${activity.title || activity.offerId}`)
this.bot.log(this.bot.isMobile, 'ACTIVITY', 'An error occurred:' + error, 'error')
throttle.record(false)
}
@@ -227,7 +226,6 @@ export class Workers {
await runWithTimeout(this.bot.activities.run(page, activity))
throttle.record(true)
} catch (e) {
await this.bot.browser.utils.captureDiagnostics(page, `activity_timeout_${activity.title || activity.offerId}`)
throttle.record(false)
throw e
}

View File

@@ -123,7 +123,6 @@ export class Quiz extends Workers {
this.bot.log(this.bot.isMobile, 'QUIZ', 'Completed the quiz successfully')
} catch (error) {
await this.bot.browser.utils.captureDiagnostics(page, 'quiz_error')
await page.close()
this.bot.log(this.bot.isMobile, 'QUIZ', 'An error occurred:' + error, 'error')
}

View File

@@ -20,7 +20,6 @@ import { Account } from './interface/Account'
import Axios from './util/Axios'
import fs from 'fs'
import path from 'path'
import { spawn } from 'child_process'
import Humanizer from './util/Humanizer'
import { detectBanReason } from './util/BanDetector'
@@ -57,18 +56,12 @@ export class MicrosoftRewardsBot {
private workers: Workers
private login = new Login(this)
private accessToken: string = ''
// Buy mode (manual spending) tracking
private buyMode: { enabled: boolean; email?: string } = { enabled: false }
// Summary collection (per process)
private accountSummaries: AccountSummary[] = []
private runId: string = Math.random().toString(36).slice(2)
private diagCount: number = 0
private bannedTriggered: { email: string; reason: string } | null = null
private globalStandby: { active: boolean; reason?: string } = { active: false }
// Scheduler heartbeat integration
private heartbeatFile?: string
private heartbeatTimer?: NodeJS.Timeout
public axios!: Axios
@@ -87,61 +80,17 @@ export class MicrosoftRewardsBot {
this.humanizer = new Humanizer(this.utils, this.config.humanization)
this.activeWorkers = this.config.clusters
this.mobileRetryAttempts = 0
// Buy mode: CLI args take precedence over config
const idx = process.argv.indexOf('-buy')
if (idx >= 0) {
const target = process.argv[idx + 1]
this.buyMode = target && /@/.test(target)
? { enabled: true, email: target }
: { enabled: true }
} else {
// Fallback to config if no CLI flag
const buyModeConfig = this.config.buyMode as { enabled?: boolean } | undefined
if (buyModeConfig?.enabled === true) {
this.buyMode.enabled = true
}
}
}
public isBuyModeEnabled(): boolean {
return this.buyMode.enabled === true
}
public getBuyModeTarget(): string | undefined {
return this.buyMode.email
}
async initialize() {
this.accounts = loadAccounts()
}
async run() {
this.printBanner()
log('main', 'MAIN', `Bot started with ${this.config.clusters} clusters`)
// If scheduler provided a heartbeat file, update it periodically to signal liveness
const hbFile = process.env.SCHEDULER_HEARTBEAT_FILE
if (hbFile) {
try {
const dir = path.dirname(hbFile)
if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true })
fs.writeFileSync(hbFile, String(Date.now()))
this.heartbeatFile = hbFile
this.heartbeatTimer = setInterval(() => {
try { fs.writeFileSync(hbFile, String(Date.now())) } catch { /* ignore */ }
}, 60_000)
} catch { /* ignore */ }
}
// If buy mode is enabled, run single-account interactive session without automation
if (this.buyMode.enabled) {
const targetInfo = this.buyMode.email ? ` for ${this.buyMode.email}` : ''
log('main', 'BUY-MODE', `Buy mode ENABLED${targetInfo}. We'll open 2 tabs: (1) a monitor tab that auto-refreshes to track points, (2) your browsing tab to redeem/purchase freely.`, 'log', 'green')
log('main', 'BUY-MODE', 'The monitor tab may refresh every ~10s. Use the other tab for your actions; monitoring is passive and non-intrusive.', 'log', 'yellow')
await this.runBuyMode()
return
}
// Only cluster when there's more than 1 cluster demanded
if (this.config.clusters > 1) {
@@ -155,229 +104,6 @@ export class MicrosoftRewardsBot {
}
}
/**
* Manual spending session: login, then leave control to user while we passively monitor points.
* Opens two tabs: a background "monitor" tab polled every ~10s for the points balance, and a
* free-browsing "user" tab. Detected balance drops are reported as spend notices; a final
* summary (spend encoded as negative totalCollected) is sent when the session cap elapses.
*/
private async runBuyMode() {
try {
await this.initialize()
// Pick the explicitly targeted account if provided, else the first loaded account
const email = this.buyMode.email || (this.accounts[0]?.email)
const account = this.accounts.find(a => a.email === email) || this.accounts[0]
if (!account) throw new Error('No account available for buy mode')
this.isMobile = false
this.axios = new Axios(account.proxy)
const browser = await this.browserFactory.createBrowser(account.proxy, account.email)
// Open the monitor tab FIRST so auto-refresh happens out of the way
let monitor = await browser.newPage()
await this.login.login(monitor, account.email, account.password, account.totp)
await this.browser.func.goHome(monitor)
this.log(false, 'BUY-MODE', 'Opened MONITOR tab (auto-refreshes to track points).', 'log', 'yellow')
// Then open the user free-browsing tab SECOND so users don't see the refreshes
const page = await browser.newPage()
await this.browser.func.goHome(page)
this.log(false, 'BUY-MODE', 'Opened USER tab (use this one to redeem/purchase freely).', 'log', 'green')
// Helper to recreate monitor tab if the user closes it
const recreateMonitor = async () => {
try { if (!monitor.isClosed()) await monitor.close() } catch { /* ignore */ }
monitor = await browser.newPage()
await this.browser.func.goHome(monitor)
}
// Helper to send an immediate spend notice via webhooks/NTFY
const sendSpendNotice = async (delta: number, nowPts: number, cumulativeSpent: number) => {
try {
const { ConclusionWebhook } = await import('./util/ConclusionWebhook')
await ConclusionWebhook(
this.config,
'💳 Spend Detected',
`**Account:** ${account.email}\n**Spent:** -${delta} points\n**Current:** ${nowPts} points\n**Session spent:** ${cumulativeSpent} points`,
undefined,
0xFFAA00
)
} catch (e) {
this.log(false, 'BUY-MODE', `Failed to send spend notice: ${e instanceof Error ? e.message : e}`, 'warn')
}
}
// Baseline balance before the user starts spending (best-effort; 0 on failure)
let initial = 0
try {
const data = await this.browser.func.getDashboardData(monitor)
initial = data.userStatus.availablePoints || 0
} catch {/* ignore */}
this.log(false, 'BUY-MODE', `Logged in as ${account.email}. Buy mode is active: monitor tab auto-refreshes; user tab is free for your actions. We'll observe points passively.`)
// Passive watcher: poll points periodically without clicking.
const start = Date.now()
let last = initial
let spent = 0
const buyModeConfig = this.config.buyMode as { maxMinutes?: number } | undefined
// Session cap: configured maxMinutes (default 45), floored at 10 minutes
const maxMinutes = Math.max(10, buyModeConfig?.maxMinutes ?? 45)
const endAt = start + maxMinutes * 60 * 1000
while (Date.now() < endAt) {
await this.utils.wait(10000)
// If monitor tab was closed by user, recreate it quietly
try {
if (monitor.isClosed()) {
this.log(false, 'BUY-MODE', 'Monitor tab was closed; reopening in background...', 'warn')
await recreateMonitor()
}
} catch { /* ignore */ }
try {
const data = await this.browser.func.getDashboardData(monitor)
const nowPts = data.userStatus.availablePoints || 0
if (nowPts < last) {
// Points decreased -> likely spent
const delta = last - nowPts
spent += delta
last = nowPts
this.log(false, 'BUY-MODE', `Detected spend: -${delta} points (current: ${nowPts})`)
// Immediate spend notice
await sendSpendNotice(delta, nowPts, spent)
} else if (nowPts > last) {
// Points increased (e.g. earnings landed) -> just move the baseline up
last = nowPts
}
} catch (err) {
// If we lost the page context, recreate the monitor tab and continue
const msg = err instanceof Error ? err.message : String(err)
if (/Target closed|page has been closed|browser has been closed/i.test(msg)) {
this.log(false, 'BUY-MODE', 'Monitor page closed or lost; recreating...', 'warn')
try { await recreateMonitor() } catch { /* ignore */ }
}
// Swallow other errors to avoid disrupting the user
}
}
// Save cookies and close monitor; keep main page open for user until they close it themselves
try {
await saveSessionData(this.config.sessionPath, browser, account.email, this.isMobile)
} catch (e) {
log(false, 'BUY-MODE', `Failed to save session: ${e instanceof Error ? e.message : String(e)}`, 'warn')
}
try { if (!monitor.isClosed()) await monitor.close() } catch {/* ignore */}
// Send a final minimal conclusion webhook for this manual session
const summary: AccountSummary = {
email: account.email,
durationMs: Date.now() - start,
desktopCollected: 0,
mobileCollected: 0,
totalCollected: -spent, // negative indicates spend
initialTotal: initial,
endTotal: last,
errors: [],
banned: { status: false, reason: '' }
}
await this.sendConclusion([summary])
this.log(false, 'BUY-MODE', 'Buy mode session finished (monitoring period ended). You can close the browser when done.')
} catch (e) {
this.log(false, 'BUY-MODE', `Error in buy mode: ${e instanceof Error ? e.message : e}`, 'error')
}
}
/**
* Print the one-time startup banner (buy-mode variant when active) followed by a
* concise config summary: version, PID, clusters/update targets, and schedule status.
* Fix: the Schedule line previously interpolated `${formatName}${timeShown}` with no
* separator, printing e.g. "24h09:00"; a space is now inserted between them.
*/
private printBanner() {
// Only print once (primary process or single cluster execution)
if (this.config.clusters > 1 && !cluster.isPrimary) return
const banner = `
╔═══════════════════════════════════════════════════════════════════════════╗
║ ║
║ ███╗ ███╗███████╗ ██████╗ ███████╗██╗ ██╗ █████╗ ██████╗ ██████╗ ███████╗ ║
║ ████╗ ████║██╔════╝ ██╔══██╗██╔════╝██║ ██║██╔══██╗██╔══██╗██╔══██╗██╔════╝ ║
║ ██╔████╔██║███████╗ ██████╔╝█████╗ ██║ █╗ ██║███████║██████╔╝██║ ██║███████╗ ║
║ ██║╚██╔╝██║╚════██║ ██╔══██╗██╔══╝ ██║███╗██║██╔══██║██╔══██╗██║ ██║╚════██║ ║
║ ██║ ╚═╝ ██║███████║ ██║ ██║███████╗╚███╔███╔╝██║ ██║██║ ██║██████╔╝███████║ ║
║ ╚═╝ ╚═╝╚══════╝ ╚═╝ ╚═╝╚══════╝ ╚══╝╚══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═════╝ ╚══════╝ ║
║ ║
║ TypeScript • Playwright • Intelligent Automation ║
║ ║
╚═══════════════════════════════════════════════════════════════════════════╝
`
const buyModeBanner = `
╔══════════════════════════════════════════════════════╗
║ ║
║ ███╗ ███╗███████╗ ██████╗ ██╗ ██╗██╗ ██╗ ║
║ ████╗ ████║██╔════╝ ██╔══██╗██║ ██║╚██╗ ██╔╝ ║
║ ██╔████╔██║███████╗ ██████╔╝██║ ██║ ╚████╔╝ ║
║ ██║╚██╔╝██║╚════██║ ██╔══██╗██║ ██║ ╚██╔╝ ║
║ ██║ ╚═╝ ██║███████║ ██████╔╝╚██████╔╝ ██║ ║
║ ╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ║
║ ║
║ Manual Purchase Mode • Passive Monitoring ║
║ ║
╚══════════════════════════════════════════════════════╝
`
// Read package version for the info line (best-effort; 'unknown' on any failure)
const pkgPath = path.join(__dirname, '../', 'package.json')
let version = 'unknown'
try {
if (fs.existsSync(pkgPath)) {
const raw = fs.readFileSync(pkgPath, 'utf-8')
const pkg = JSON.parse(raw)
version = pkg.version || version
}
} catch {
// Ignore version read errors
}
// Display appropriate banner based on mode
const displayBanner = this.buyMode.enabled ? buyModeBanner : banner
console.log(displayBanner)
console.log('='.repeat(80))
if (this.buyMode.enabled) {
console.log(` Version: ${version} | Process: ${process.pid} | Buy Mode: Active`)
console.log(` Target: ${this.buyMode.email || 'First account'} | Documentation: buy-mode.md`)
} else {
console.log(` Version: ${version} | Process: ${process.pid} | Clusters: ${this.config.clusters}`)
// Concise enabled-feature status: update targets, then schedule
const upd = this.config.update || {}
const updTargets: string[] = []
if (upd.git !== false) updTargets.push('Git') // git updates default to ON
if (upd.docker) updTargets.push('Docker')
if (updTargets.length > 0) {
console.log(` Update: ${updTargets.join(', ')}`)
}
const sched = this.config.schedule || {}
const schedEnabled = !!sched.enabled
if (!schedEnabled) {
console.log(' Schedule: OFF')
} else {
// Determine active format + time string to display
const tz = sched.timeZone || 'UTC'
let formatName = ''
let timeShown = ''
const srec: Record<string, unknown> = sched as unknown as Record<string, unknown>
const useAmPmVal = typeof srec['useAmPm'] === 'boolean' ? (srec['useAmPm'] as boolean) : undefined
const time12Val = typeof srec['time12'] === 'string' ? String(srec['time12']) : undefined
const time24Val = typeof srec['time24'] === 'string' ? String(srec['time24']) : undefined
if (useAmPmVal === true) {
formatName = 'AM/PM'
timeShown = time12Val || sched.time || '9:00 AM'
} else if (useAmPmVal === false) {
formatName = '24h'
timeShown = time24Val || sched.time || '09:00'
} else {
// Back-compat: infer from provided fields if possible
if (time24Val && time24Val.trim()) { formatName = '24h'; timeShown = time24Val }
else if (time12Val && time12Val.trim()) { formatName = 'AM/PM'; timeShown = time12Val }
else { formatName = 'legacy'; timeShown = sched.time || '09:00' }
}
// FIX: space inserted between format name and time (was "${formatName}${timeShown}")
console.log(` Schedule: ON — ${formatName} ${timeShown} • TZ=${tz}`)
}
}
console.log('='.repeat(80) + '\n')
}
// Return summaries (used when clusters==1)
public getSummaries() {
return this.accountSummaries
@@ -443,21 +169,13 @@ export class MicrosoftRewardsBot {
// Check if all workers have exited
if (this.activeWorkers === 0) {
// All workers done -> send conclusion (if enabled), run optional auto-update, then exit
// All workers done
(async () => {
try {
await this.sendConclusion(this.accountSummaries)
} catch {/* ignore */}
try {
await this.runAutoUpdate()
} catch {/* ignore */}
// Only exit if not spawned by scheduler
if (!process.env.SCHEDULER_HEARTBEAT_FILE) {
log('main', 'MAIN-WORKER', 'All workers destroyed. Exiting main process!', 'warn')
process.exit(0)
} else {
log('main', 'MAIN-WORKER', 'All workers destroyed. Scheduler mode: returning control to scheduler.')
}
log('main', 'MAIN-WORKER', 'All workers destroyed. Exiting main process!', 'warn')
process.exit(0)
})()
}
})
@@ -661,7 +379,6 @@ export class MicrosoftRewardsBot {
// If any account is flagged compromised, do NOT exit; keep the process alive so the browser stays open
if (this.compromisedModeActive || this.globalStandby.active) {
log('main','SECURITY','Compromised or banned detected. Global standby engaged: we will NOT proceed to other accounts until resolved. Keeping process alive. Press CTRL+C to exit when done. Security check by @Light','warn','yellow')
// Periodic heartbeat with cleanup on exit
const standbyInterval = setInterval(() => {
log('main','SECURITY','Still in standby: session(s) held open for manual recovery / review...','warn','yellow')
}, 5 * 60 * 1000)
@@ -677,18 +394,9 @@ export class MicrosoftRewardsBot {
process.send({ type: 'summary', data: this.accountSummaries })
}
} else {
// Single process mode -> build and send conclusion directly
await this.sendConclusion(this.accountSummaries)
// Cleanup heartbeat timer/file at end of run
if (this.heartbeatTimer) { try { clearInterval(this.heartbeatTimer) } catch { /* ignore */ } }
if (this.heartbeatFile) { try { if (fs.existsSync(this.heartbeatFile)) fs.unlinkSync(this.heartbeatFile) } catch { /* ignore */ } }
// After conclusion, run optional auto-update
await this.runAutoUpdate().catch(() => {/* ignore update errors */})
}
// Only exit if not spawned by scheduler
if (!process.env.SCHEDULER_HEARTBEAT_FILE) {
process.exit()
// Single process mode
}
process.exit()
}
/** Send immediate ban alert if configured. */
@@ -1055,15 +763,6 @@ export class MicrosoftRewardsBot {
log('main','REPORT',`Failed to save report: ${e instanceof Error ? e.message : e}`,'warn')
}
// Cleanup old diagnostics
try {
const days = cfg.diagnostics?.retentionDays
if (typeof days === 'number' && days > 0) {
await this.cleanupOldDiagnostics(days)
}
} catch (e) {
log('main','REPORT',`Failed diagnostics cleanup: ${e instanceof Error ? e.message : e}`,'warn')
}
// Optional community notice (shown randomly in ~15% of successful runs)
if (Math.random() > 0.85 && successes > 0 && accountsWithErrors === 0) {
@@ -1072,62 +771,7 @@ export class MicrosoftRewardsBot {
}
/**
* Reserve one diagnostics capture slot for this run.
* @param maxPerRun cap on captures per run (values <= 0 or falsy mean no slots)
* @returns true if a slot was reserved, false once the cap is exhausted
*/
public tryReserveDiagSlot(maxPerRun: number): boolean {
const cap = Math.max(0, maxPerRun || 0)
const hasSlot = this.diagCount < cap
if (hasSlot) this.diagCount += 1
return hasSlot
}
/**
* Delete diagnostics folders older than N days under ./reports.
* Folder names are expected to be YYYY-MM-DD; anything else is skipped.
* Deletion is best-effort: individual rm failures are ignored.
*/
private async cleanupOldDiagnostics(retentionDays: number) {
const reportsDir = path.join(process.cwd(), 'reports')
if (!fs.existsSync(reportsDir)) return
const nowMs = Date.now()
const keepMs = retentionDays * 24 * 60 * 60 * 1000
for (const entry of fs.readdirSync(reportsDir, { withFileTypes: true })) {
if (!entry.isDirectory()) continue
// Parse the folder name as a date; skip names that are not three numeric parts
const segments = entry.name.split('-').map((s: string) => parseInt(s, 10))
if (segments.length !== 3 || segments.some(isNaN)) continue
const [year, month, day] = segments
if (year === undefined || month === undefined || day === undefined) continue
const stampMs = new Date(year, month - 1, day).getTime()
if (isNaN(stampMs)) continue
if (nowMs - stampMs > keepMs) {
try { fs.rmSync(path.join(reportsDir, entry.name), { recursive: true, force: true }) } catch { /* ignore */ }
}
}
}
/**
* Run the optional post-run auto-update script according to the `update` config.
* Resolves the script path, builds --git/--docker flags (git defaults to ON unless
* explicitly false), and waits for the child process to finish. Never rejects:
* spawn errors and non-zero exits simply resolve.
*/
private async runAutoUpdate(): Promise<void> {
const updateCfg = this.config.update
if (!updateCfg) return
const scriptAbs = path.join(process.cwd(), updateCfg.scriptPath || 'setup/update/update.mjs')
if (!fs.existsSync(scriptAbs)) return
const flags: string[] = []
// Git update is enabled by default (unless explicitly set to false)
if (updateCfg.git !== false) flags.push('--git')
if (updateCfg.docker) flags.push('--docker')
if (flags.length === 0) return
// Tell the updater it was spawned by the scheduler so it doesn't exit the process
const schedulerMode = !!process.env.SCHEDULER_HEARTBEAT_FILE
const childEnv = schedulerMode
? { ...process.env, FROM_SCHEDULER: '1' }
: process.env
await new Promise<void>((resolve) => {
const child = spawn(process.execPath, [scriptAbs, ...flags], { stdio: 'inherit', env: childEnv })
child.on('close', () => resolve())
child.on('error', () => resolve())
})
}
/** Public entry-point to engage global security standby from other modules (idempotent). */
public async engageGlobalStandby(reason: string, email?: string): Promise<void> {
@@ -1195,7 +839,6 @@ async function main() {
}
const gracefulExit = (code: number) => {
try { rewardsBot['heartbeatTimer'] && clearInterval(rewardsBot['heartbeatTimer']) } catch { /* ignore */ }
if (config?.crashRecovery?.autoRestart && code !== 0) {
const max = config.crashRecovery.maxRestarts ?? 2
if (crashState.restarts < max) {
@@ -1231,4 +874,4 @@ if (require.main === module) {
log('main', 'MAIN-ERROR', `Error running bots: ${error}`, 'error')
process.exit(1)
})
}
}

View File

@@ -22,15 +22,10 @@ export interface Config {
webhook: ConfigWebhook;
conclusionWebhook?: ConfigWebhook; // Optional secondary webhook for final summary
ntfy: ConfigNtfy;
diagnostics?: ConfigDiagnostics;
update?: ConfigUpdate;
schedule?: ConfigSchedule;
passesPerRun?: number;
buyMode?: ConfigBuyMode; // Optional manual spending mode
vacation?: ConfigVacation; // Optional monthly contiguous off-days
crashRecovery?: ConfigCrashRecovery; // Automatic restart / graceful shutdown
riskManagement?: ConfigRiskManagement; // NEW: Risk-aware throttling and ban prediction
analytics?: ConfigAnalytics; // NEW: Performance dashboard and metrics tracking
dryRun?: boolean; // NEW: Dry-run mode (simulate without executing)
queryDiversity?: ConfigQueryDiversity; // NEW: Multi-source query generation
}
@@ -83,39 +78,6 @@ export interface ConfigProxy {
proxyBingTerms: boolean;
}
/** Settings for failure-evidence capture (screenshots/HTML saved per run). */
export interface ConfigDiagnostics {
enabled?: boolean; // master toggle
saveScreenshot?: boolean; // capture .png
saveHtml?: boolean; // capture .html
maxPerRun?: number; // cap number of captures per run
retentionDays?: number; // delete older diagnostic folders
}
/** Post-run auto-update settings (git pull and/or docker refresh via an update script). */
export interface ConfigUpdate {
git?: boolean; // if true, run git pull + npm ci + npm run build after completion
docker?: boolean; // if true, run docker update routine (compose pull/up) after completion
scriptPath?: string; // optional custom path to update script relative to repo root
autoUpdateConfig?: boolean; // if true, allow auto-update of config.jsonc when remote changes it (default: false to preserve user settings)
autoUpdateAccounts?: boolean; // if true, allow auto-update of accounts.json when remote changes it (default: false to preserve credentials)
}
/** Manual purchase/redeem mode settings (also toggled via the -buy CLI flag). */
export interface ConfigBuyMode {
enabled?: boolean; // if true, force buy mode session
maxMinutes?: number; // session duration cap
}
/**
* Built-in scheduler settings. Time may be given as legacy `time`, explicit
* `time12`/`time24` (selected via `useAmPm`), or cron expression(s).
*/
export interface ConfigSchedule {
enabled?: boolean;
time?: string; // Back-compat: accepts "HH:mm" or "h:mm AM/PM"
// New optional explicit times
time12?: string; // e.g., "9:00 AM"
time24?: string; // e.g., "09:00"
timeZone?: string; // IANA TZ e.g., "America/New_York"
useAmPm?: boolean; // If true, prefer time12 + AM/PM style; if false, prefer time24. If undefined, back-compat behavior.
runImmediatelyOnStart?: boolean; // if true, run once immediately when process starts
cron?: string | string[]; // Optional cron expression(s) (standard 5-field or 6-field) for advanced scheduling
}
export interface ConfigVacation {
enabled?: boolean; // default false
minDays?: number; // default 3
@@ -195,7 +157,7 @@ export interface ConfigLogging {
// CommunityHelp removed (privacy-first policy)
// NEW FEATURES: Risk Management, Analytics, Query Diversity
// NEW FEATURES: Risk Management, Query Diversity
export interface ConfigRiskManagement {
enabled?: boolean; // master toggle for risk-aware throttling
autoAdjustDelays?: boolean; // automatically increase delays when risk is high
@@ -204,13 +166,6 @@ export interface ConfigRiskManagement {
riskThreshold?: number; // 0-100, pause if risk exceeds this
}
/** Performance-metrics tracking settings (retention, markdown export, webhook summaries). */
export interface ConfigAnalytics {
enabled?: boolean; // track performance metrics
retentionDays?: number; // how long to keep analytics data
exportMarkdown?: boolean; // generate markdown reports
webhookSummary?: boolean; // send analytics via webhook
}
export interface ConfigQueryDiversity {
enabled?: boolean; // use multi-source query generation
sources?: Array<'google-trends' | 'reddit' | 'news' | 'wikipedia' | 'local-fallback'>; // which sources to use

View File

@@ -1,418 +0,0 @@
import { DateTime, IANAZone } from 'luxon'
import cronParser from 'cron-parser'
import { spawn } from 'child_process'
import fs from 'fs'
import path from 'path'
import { MicrosoftRewardsBot } from './index'
import { loadConfig } from './util/Load'
import { log } from './util/Logger'
import type { Config } from './interface/Config'
type CronExpressionInfo = { expression: string; tz: string }
type DateTimeInstance = ReturnType<typeof DateTime.fromJSDate>
/**
* Resolve the configured schedule into an IANA timezone plus hour/minute.
* Accepts 24h ("HH:mm") and 12h ("h:mm AM/PM" / "h AM/PM") inputs, honouring
* `useAmPm` when set and falling back to legacy `time` otherwise.
* Invalid timezones warn and fall back to UTC; unparseable times default to 09:00.
*/
function resolveTimeParts(schedule: Config['schedule'] | undefined): { tz: string; hour: number; minute: number } {
const clamp = (value: number, lo: number, hi: number) => Math.max(lo, Math.min(hi, value))
const configuredZone = schedule?.timeZone
const tz = (configuredZone && IANAZone.isValidZone(configuredZone)) ? configuredZone : 'UTC'
// Warn when a timezone was supplied but did not validate
if (configuredZone && tz !== configuredZone) {
void log('main', 'SCHEDULER', `Invalid timezone "${configuredZone}" provided. Falling back to UTC. Valid zones: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones`, 'warn')
}
// Choose which configured string to parse
let source: string
if (typeof schedule?.useAmPm === 'boolean') {
source = (schedule.useAmPm ? (schedule.time12 || schedule.time || '') : (schedule.time24 || schedule.time || '')).trim()
} else {
// Back-compat: prefer time if present; else time24 or time12
source = (schedule?.time || schedule?.time24 || schedule?.time12 || '').trim()
}
// 24h form: HH:mm
const asTwentyFour = /^\s*(\d{1,2}):(\d{2})\s*$/i.exec(source)
if (asTwentyFour) {
return {
tz,
hour: clamp(parseInt(asTwentyFour[1]!, 10), 0, 23),
minute: clamp(parseInt(asTwentyFour[2]!, 10), 0, 59)
}
}
// 12h form: "h:mm AM/PM" or "h AM/PM"
const asTwelve = /^\s*(\d{1,2})(?::(\d{2}))?\s*(AM|PM)\s*$/i.exec(source)
if (asTwelve) {
let hour = parseInt(asTwelve[1]!, 10)
if (hour === 12) hour = 0 // 12 AM -> 0; 12 PM becomes 12 after the PM shift below
if (asTwelve[3]!.toUpperCase() === 'PM') hour += 12
const minute = asTwelve[2] ? parseInt(asTwelve[2]!, 10) : 0
return { tz, hour: clamp(hour, 0, 23), minute: clamp(minute, 0, 59) }
}
// Fallback: default 09:00
return { tz, hour: 9, minute: 0 }
}
/** Today's scheduled run moment in the configured timezone, with seconds and millis zeroed. */
function parseTargetToday(now: Date, schedule: Config['schedule'] | undefined) {
    const parts = resolveTimeParts(schedule)
    return DateTime
        .fromJSDate(now, { zone: parts.tz })
        .set({ hour: parts.hour, minute: parts.minute, second: 0, millisecond: 0 })
}
/**
 * Flatten the configured cron value (string or string[]) into a list of
 * trimmed, non-empty expressions, each tagged with the effective timezone.
 */
function normalizeCronExpressions(schedule: Config['schedule'] | undefined, fallbackTz: string): CronExpressionInfo[] {
    const raw = schedule?.cron
    if (!schedule || !raw) return []
    const tz = (schedule.timeZone && IANAZone.isValidZone(schedule.timeZone)) ? schedule.timeZone : fallbackTz
    const result: CronExpressionInfo[] = []
    for (const entry of (Array.isArray(raw) ? raw : [raw])) {
        const expression = typeof entry === 'string' ? entry.trim() : ''
        if (expression.length > 0) result.push({ expression, tz })
    }
    return result
}
/**
 * Evaluate every cron expression and return the earliest occurrence after
 * `after`, or null when none of the expressions parse. Invalid expressions
 * are logged and skipped rather than aborting the scan.
 */
function getNextCronOccurrence(after: DateTimeInstance, items: CronExpressionInfo[]): { next: DateTimeInstance; source: string } | null {
    let best: { next: DateTimeInstance; source: string } | null = null
    for (const { expression, tz } of items) {
        let candidate: DateTimeInstance
        try {
            const iterator = cronParser.parseExpression(expression, {
                currentDate: after.toJSDate(),
                tz
            })
            candidate = DateTime.fromJSDate(iterator.next().toDate(), { zone: tz })
        } catch (error) {
            void log('main', 'SCHEDULER', `Invalid cron expression "${expression}": ${error instanceof Error ? error.message : String(error)}`, 'warn')
            continue
        }
        if (best === null || candidate < best.next) {
            best = { next: candidate, source: expression }
        }
    }
    return best
}
/** Next daily occurrence after `after`: today's slot if still upcoming, otherwise tomorrow's. */
function getNextDailyOccurrence(after: DateTimeInstance, schedule: Config['schedule'] | undefined): DateTimeInstance {
    const todaySlot = parseTargetToday(after.toJSDate(), schedule)
    if (after >= todaySlot) {
        return todaySlot.plus({ days: 1 })
    }
    return todaySlot
}
/**
 * Decide when the next run happens: cron expressions take priority when at
 * least one is valid; otherwise (or when all are invalid) the plain daily
 * time-of-day schedule is used.
 */
function computeNextRun(after: DateTimeInstance, schedule: Config['schedule'] | undefined, cronItems: CronExpressionInfo[]): { next: DateTimeInstance; source: 'cron' | 'daily'; detail?: string } {
    if (cronItems.length > 0) {
        const fromCron = getNextCronOccurrence(after, cronItems)
        if (fromCron !== null) {
            return { next: fromCron.next, source: 'cron', detail: fromCron.source }
        }
        void log('main', 'SCHEDULER', 'All cron expressions invalid; falling back to daily schedule', 'warn')
    }
    return { next: getNextDailyOccurrence(after, schedule), source: 'daily' }
}
/**
 * Execute one complete bot pass in the current process: construct the bot,
 * initialize it, then run it to completion. Used by the in-process fallback
 * of the watchdog (the default path forks a child instead).
 */
async function runOnePass(): Promise<void> {
    const bot = new MicrosoftRewardsBot(false)
    await bot.initialize()
    await bot.run()
}
/**
 * Run a single pass either in-process or as a child process (default),
 * with a heartbeat-aware watchdog that terminates stuck runs.
 *
 * Watchdog model (child mode):
 *  - grace:   minutes before the heartbeat is judged at all
 *  - stale:   heartbeat older than this (after grace) => terminate the child
 *  - hardcap: absolute runtime ceiling; terminate regardless of heartbeat
 */
async function runOnePassWithWatchdog(): Promise<void> {
    // Parse a bounded numeric env var, falling back (with a warning) on junk values.
    const parseEnvNumber = (key: string, fallback: number, min: number, max: number): number => {
        const val = Number(process.env[key] || fallback)
        if (isNaN(val) || val < min || val > max) {
            void log('main', 'SCHEDULER', `Invalid ${key}="${process.env[key]}". Using default ${fallback}`, 'warn')
            return fallback
        }
        return val
    }
    let staleHeartbeatMin = parseEnvNumber(
        process.env.SCHEDULER_STALE_HEARTBEAT_MINUTES ? 'SCHEDULER_STALE_HEARTBEAT_MINUTES' : 'SCHEDULER_PASS_TIMEOUT_MINUTES',
        30, 5, 1440
    )
    const graceMin = parseEnvNumber('SCHEDULER_HEARTBEAT_GRACE_MINUTES', 15, 1, 120)
    const hardcapMin = parseEnvNumber('SCHEDULER_PASS_HARDCAP_MINUTES', 480, 30, 1440)
    const checkEveryMs = 60_000 // check once per minute
    // Stale must be >= grace, or a child could be judged stale the moment grace ends.
    // Previous version only logged "Adjusting" without actually adjusting the value.
    if (staleHeartbeatMin < graceMin) {
        await log('main', 'SCHEDULER', `Warning: STALE_HEARTBEAT (${staleHeartbeatMin}m) < GRACE (${graceMin}m). Adjusting stale to ${graceMin}m`, 'warn')
        staleHeartbeatMin = graceMin
    }
    // Fork per pass: safer because we can terminate a stuck child without killing the scheduler
    const forkPerPass = String(process.env.SCHEDULER_FORK_PER_PASS || 'true').toLowerCase() !== 'false'
    if (!forkPerPass) {
        // In-process fallback (no true watchdog possible; cannot force-kill if truly stuck)
        await log('main', 'SCHEDULER', `Starting pass in-process (grace ${graceMin}m • stale ${staleHeartbeatMin}m • hardcap ${hardcapMin}m). Cannot force-kill if stuck.`)
        await runOnePass()
        return
    }
    // Child process execution
    const indexJs = path.join(__dirname, 'index.js')
    await log('main', 'SCHEDULER', `Spawning child for pass: ${process.execPath} ${indexJs}`)
    // The child periodically touches this heartbeat file; its mtime is our liveness signal.
    const cfg = loadConfig() as Config
    const baseDir = path.join(process.cwd(), cfg.sessionPath || 'sessions')
    const hbFile = path.join(baseDir, `heartbeat_${Date.now()}.lock`)
    try { fs.mkdirSync(baseDir, { recursive: true }) } catch { /* ignore */ }
    await new Promise<void>((resolve) => {
        const child = spawn(process.execPath, [indexJs], { stdio: 'inherit', env: { ...process.env, SCHEDULER_HEARTBEAT_FILE: hbFile } })
        let finished = false
        const startedAt = Date.now()
        let killTimeout: NodeJS.Timeout | undefined
        const killChild = async (signal: NodeJS.Signals) => {
            try {
                await log('main', 'SCHEDULER', `Sending ${signal} to stuck child PID ${child.pid}`,'warn')
                child.kill(signal)
            } catch { /* ignore */ }
        }
        // SIGTERM first; escalate to SIGKILL after 10s if the child ignores it.
        const terminateChild = () => {
            void killChild('SIGTERM')
            if (killTimeout) clearTimeout(killTimeout)
            killTimeout = setTimeout(() => { try { child.kill('SIGKILL') } catch { /* ignore */ } }, 10_000)
        }
        const timer = setInterval(() => {
            if (finished) return
            const now = Date.now()
            const runtimeMin = Math.floor((now - startedAt) / 60000)
            // Hard cap: always terminate if exceeded
            if (runtimeMin >= hardcapMin) {
                void log('main', 'SCHEDULER', `Pass exceeded hard cap of ${hardcapMin} minutes; terminating...`, 'warn')
                terminateChild()
                return
            }
            // Before grace, don't judge
            if (runtimeMin < graceMin) return
            // Check heartbeat freshness
            try {
                const ageMin = Math.floor((now - fs.statSync(hbFile).mtimeMs) / 60000)
                if (ageMin >= staleHeartbeatMin) {
                    void log('main', 'SCHEDULER', `Heartbeat stale for ${ageMin}m (>=${staleHeartbeatMin}m). Terminating child...`, 'warn')
                    terminateChild()
                }
            } catch (err) {
                // If the heartbeat file is missing/unreadable after grace, treat as stale
                const msg = err instanceof Error ? err.message : String(err)
                void log('main', 'SCHEDULER', `Heartbeat file check failed: ${msg}. Terminating child...`, 'warn')
                terminateChild()
            }
        }, checkEveryMs)
        child.on('exit', async (code, signal) => {
            finished = true
            clearInterval(timer)
            if (killTimeout) clearTimeout(killTimeout)
            // Cleanup heartbeat file
            try { if (fs.existsSync(hbFile)) fs.unlinkSync(hbFile) } catch { /* ignore */ }
            if (signal) {
                await log('main', 'SCHEDULER', `Child exited due to signal: ${signal}`, 'warn')
            } else if (code && code !== 0) {
                await log('main', 'SCHEDULER', `Child exited with non-zero code: ${code}`, 'warn')
            } else {
                await log('main', 'SCHEDULER', 'Child pass completed successfully')
            }
            resolve()
        })
        child.on('error', async (err) => {
            finished = true
            clearInterval(timer)
            if (killTimeout) clearTimeout(killTimeout)
            try { if (fs.existsSync(hbFile)) fs.unlinkSync(hbFile) } catch { /* ignore */ }
            await log('main', 'SCHEDULER', `Failed to spawn child: ${err instanceof Error ? err.message : String(err)}`, 'error')
            resolve()
        })
    })
}
/** Run the requested number of sequential watchdogged passes, logging start, end, and duration. */
async function runPasses(passes: number): Promise<void> {
    const total = Math.max(1, Math.floor(passes || 1))
    for (let pass = 1; pass <= total; pass++) {
        await log('main', 'SCHEDULER', `Starting pass ${pass}/${total}`)
        const startedAt = Date.now()
        await runOnePassWithWatchdog()
        // Report at least 1s so a very fast pass still shows a sensible duration
        const elapsedSec = Math.max(1, Math.round((Date.now() - startedAt) / 1000))
        await log('main', 'SCHEDULER', `Completed pass ${pass}/${total}`)
        await log('main', 'SCHEDULER', `Pass ${pass} duration: ${elapsedSec}s`)
    }
}
/**
 * Scheduler entry point.
 *
 * Loads config, then either:
 *  - schedule disabled: runs the configured passes once and exits, or
 *  - schedule enabled: loops forever computing the next run time (cron takes
 *    priority over the daily time), sleeping until then, and skipping runs
 *    that fall on randomly sampled weekly off-days or the monthly vacation
 *    block (both re-sampled lazily and sticky for their week/month).
 */
async function main() {
    const cfg = loadConfig() as Config & { schedule?: { enabled?: boolean; time?: string; timeZone?: string; runImmediatelyOnStart?: boolean } }
    const schedule = cfg.schedule || { enabled: false }
    const passes = typeof cfg.passesPerRun === 'number' ? cfg.passesPerRun : 1
    const offPerWeek = Math.max(0, Math.min(7, Number(cfg.humanization?.randomOffDaysPerWeek ?? 1)))
    let offDays: number[] = [] // 1..7 ISO weekday
    let offWeek: number | null = null
    type VacRange = { start: string; end: string } | null
    let vacMonth: string | null = null // 'yyyy-LL'
    let vacRange: VacRange = null // ISO dates 'yyyy-LL-dd'
    // Re-sample the weekly off-days once per ISO week (sticky within the same week).
    const refreshOffDays = async (now: { weekNumber: number }) => {
        if (offPerWeek <= 0) { offDays = []; offWeek = null; return }
        const week = now.weekNumber
        if (offWeek === week && offDays.length) return
        // choose distinct weekdays [1..7]
        const pool = [1,2,3,4,5,6,7]
        const chosen: number[] = []
        for (let i=0;i<Math.min(offPerWeek,7);i++) {
            const idx = Math.floor(Math.random()*pool.length)
            chosen.push(pool[idx]!)
            pool.splice(idx,1)
        }
        offDays = chosen.sort((a,b)=>a-b)
        offWeek = week
        const msg = offDays.length ? offDays.join(', ') : 'none'
        await log('main','SCHEDULER',`Weekly humanization off-day sample (ISO weekday): ${msg} | adjust via config.humanization.randomOffDaysPerWeek`,'warn')
    }
    // Pick one contiguous vacation block per calendar month (sticky within the month).
    const chooseVacationRange = async (now: typeof DateTime.prototype) => {
        // Only when enabled
        if (!cfg.vacation?.enabled) { vacRange = null; vacMonth = null; return }
        const monthKey = now.toFormat('yyyy-LL')
        if (vacMonth === monthKey && vacRange) return
        // Determine month days and choose contiguous block
        const monthStart = now.startOf('month')
        const monthEnd = now.endOf('month')
        const totalDays = monthEnd.day
        const minD = Math.max(1, Math.min(28, Number(cfg.vacation.minDays ?? 3)))
        const maxD = Math.max(minD, Math.min(31, Number(cfg.vacation.maxDays ?? 5)))
        const span = (minD === maxD) ? minD : (minD + Math.floor(Math.random() * (maxD - minD + 1)))
        const latestStart = Math.max(1, totalDays - span + 1)
        const startDay = 1 + Math.floor(Math.random() * latestStart)
        const start = monthStart.set({ day: startDay })
        const end = start.plus({ days: span - 1 })
        vacMonth = monthKey
        vacRange = { start: start.toFormat('yyyy-LL-dd'), end: end.toFormat('yyyy-LL-dd') }
        await log('main','SCHEDULER',`Selected vacation block this month: ${vacRange.start}${vacRange.end} (${span} day(s))`,'warn')
    }
    if (!schedule.enabled) {
        await log('main', 'SCHEDULER', 'Schedule disabled; running once then exit')
        await runPasses(passes)
        process.exit(0)
    }
    const tz = (schedule.timeZone && IANAZone.isValidZone(schedule.timeZone)) ? schedule.timeZone : 'UTC'
    const cronExpressions = normalizeCronExpressions(schedule, tz)
    // Default to false to avoid unexpected immediate runs
    const runImmediate = schedule.runImmediatelyOnStart === true
    let running = false
    // Optional initial jitter before the first run (to vary start time)
    const parseJitter = (minKey: string, maxKey: string, fallbackMin: string, fallbackMax: string): [number, number] => {
        const minVal = Number(process.env[minKey] || process.env[fallbackMin] || 0)
        const maxVal = Number(process.env[maxKey] || process.env[fallbackMax] || 0)
        if (isNaN(minVal) || minVal < 0) {
            void log('main', 'SCHEDULER', `Invalid ${minKey}="${process.env[minKey]}". Using 0`, 'warn')
            return [0, isNaN(maxVal) || maxVal < 0 ? 0 : maxVal]
        }
        if (isNaN(maxVal) || maxVal < 0) {
            void log('main', 'SCHEDULER', `Invalid ${maxKey}="${process.env[maxKey]}". Using 0`, 'warn')
            return [minVal, 0]
        }
        return [minVal, maxVal]
    }
    const initialJitterBounds = parseJitter('SCHEDULER_INITIAL_JITTER_MINUTES_MIN', 'SCHEDULER_INITIAL_JITTER_MINUTES_MAX', 'SCHEDULER_INITIAL_JITTER_MIN', 'SCHEDULER_INITIAL_JITTER_MAX')
    const applyInitialJitter = (initialJitterBounds[0] > 0 || initialJitterBounds[1] > 0)
    // Optional immediate run at startup (still honors jitter, off-days, and vacation).
    if (runImmediate && !running) {
        running = true
        if (applyInitialJitter) {
            const min = Math.max(0, Math.min(initialJitterBounds[0], initialJitterBounds[1]))
            const max = Math.max(0, Math.max(initialJitterBounds[0], initialJitterBounds[1]))
            const jitterSec = (min === max) ? min * 60 : (min * 60 + Math.floor(Math.random() * ((max - min) * 60)))
            if (jitterSec > 0) {
                await log('main', 'SCHEDULER', `Initial jitter: delaying first run by ${Math.round(jitterSec / 60)} minute(s) (${jitterSec}s)`, 'warn')
                await new Promise((r) => setTimeout(r, jitterSec * 1000))
            }
        }
        const nowDT = DateTime.local().setZone(tz)
        await chooseVacationRange(nowDT)
        await refreshOffDays(nowDT)
        const todayIso = nowDT.toFormat('yyyy-LL-dd')
        const vr = vacRange as { start: string; end: string } | null
        const isVacationToday = !!(vr && todayIso >= vr.start && todayIso <= vr.end)
        if (isVacationToday) {
            await log('main','SCHEDULER',`Skipping immediate run: vacation day (${todayIso})`,'warn')
        } else if (offDays.includes(nowDT.weekday)) {
            await log('main','SCHEDULER',`Skipping immediate run: humanization off-day (ISO weekday ${nowDT.weekday}). Set humanization.randomOffDaysPerWeek=0 to disable.`,'warn')
        } else {
            await runPasses(passes)
        }
        running = false
    }
    // Main scheduling loop: compute next occurrence, sleep until then, run (or skip).
    for (;;) {
        const nowDT = DateTime.local().setZone(tz)
        const nextInfo = computeNextRun(nowDT, schedule, cronExpressions)
        const next = nextInfo.next
        let ms = Math.max(0, next.toMillis() - nowDT.toMillis())
        // Optional daily jitter to further randomize the exact start time each day
        let extraMs = 0
        if (cronExpressions.length === 0) {
            const dailyJitterBounds = parseJitter('SCHEDULER_DAILY_JITTER_MINUTES_MIN', 'SCHEDULER_DAILY_JITTER_MINUTES_MAX', 'SCHEDULER_DAILY_JITTER_MIN', 'SCHEDULER_DAILY_JITTER_MAX')
            const djMin = dailyJitterBounds[0]
            const djMax = dailyJitterBounds[1]
            if (djMin > 0 || djMax > 0) {
                const mn = Math.max(0, Math.min(djMin, djMax))
                const mx = Math.max(0, Math.max(djMin, djMax))
                const jitterSec = (mn === mx) ? mn * 60 : (mn * 60 + Math.floor(Math.random() * ((mx - mn) * 60)))
                extraMs = jitterSec * 1000
                ms += extraMs
            }
        }
        const human = next.toFormat('yyyy-LL-dd HH:mm ZZZZ')
        const totalSec = Math.round(ms / 1000)
        const jitterMsg = extraMs > 0 ? ` plus daily jitter (+${Math.round(extraMs/60000)}m)` : ''
        const sourceMsg = nextInfo.source === 'cron' ? ` [cron: ${nextInfo.detail}]` : ''
        await log('main', 'SCHEDULER', `Next run at ${human}${jitterMsg}${sourceMsg} (in ${totalSec}s)`)
        await new Promise((resolve) => setTimeout(resolve, ms))
        // Re-evaluate vacation/off-day state at wake-up time, not at scheduling time
        const nowRun = DateTime.local().setZone(tz)
        await chooseVacationRange(nowRun)
        await refreshOffDays(nowRun)
        const todayIso2 = nowRun.toFormat('yyyy-LL-dd')
        const vr2 = vacRange as { start: string; end: string } | null
        const isVacation = !!(vr2 && todayIso2 >= vr2.start && todayIso2 <= vr2.end)
        if (isVacation) {
            await log('main','SCHEDULER',`Skipping scheduled run: vacation day (${todayIso2})`,'warn')
            continue
        }
        if (offDays.includes(nowRun.weekday)) {
            await log('main','SCHEDULER',`Skipping scheduled run: humanization off-day (ISO weekday ${nowRun.weekday}). Set humanization.randomOffDaysPerWeek=0 to disable.`,'warn')
            continue
        }
        // `running` guards against overlapping passes if a trigger fires during a run
        if (!running) {
            running = true
            await runPasses(passes)
            running = false
        } else {
            await log('main','SCHEDULER','Skipped scheduled trigger because a pass is already running','warn')
        }
    }
}
// Bootstrap: start the scheduler; exit non-zero on any unhandled fatal error.
main().catch((e) => {
    void log('main', 'SCHEDULER', `Fatal error: ${e instanceof Error ? e.message : String(e)}`, 'error')
    process.exit(1)
})

View File

@@ -1,269 +0,0 @@
import fs from 'fs'
import path from 'path'
/** Metrics captured for a single account's run on a single day (one JSON file each). */
export interface DailyMetrics {
    date: string // YYYY-MM-DD
    email: string
    pointsEarned: number
    pointsInitial: number
    pointsEnd: number
    desktopPoints: number
    mobilePoints: number
    executionTimeMs: number
    successRate: number // 0-1
    errorsCount: number
    banned: boolean
    riskScore?: number // optional risk score recorded for this run
}
/** Aggregated per-account statistics derived from stored DailyMetrics files. */
export interface AccountHistory {
    email: string
    totalRuns: number
    totalPointsEarned: number
    avgPointsPerDay: number
    avgExecutionTime: number // milliseconds, averaged over runs
    successRate: number // 0-1 fraction of runs counted as successful
    lastRunDate: string // YYYY-MM-DD, or 'never' when no data exists
    banHistory: Array<{ date: string; reason: string }>
    riskTrend: number[] // last N risk scores
}
/** Cross-account report produced by Analytics.generateSummary(). */
export interface AnalyticsSummary {
    period: string // e.g., 'last-7-days', 'last-30-days', 'all-time'
    accounts: AccountHistory[]
    globalStats: {
        totalPoints: number
        avgSuccessRate: number // 0-1, averaged across accounts
        mostProductiveAccount: string // email with the most points, or 'none'
        mostRiskyAccount: string // email with the highest average risk, or 'none'
    }
}
/**
 * Analytics tracks performance metrics, point collection trends, and account health.
 * Data is persisted as one JSON file per account per day, named
 * `<sanitizedEmail>_<YYYY-MM-DD>.json`, for lightweight storage and easy analysis.
 */
export class Analytics {
    private dataDir: string

    // Matches "<email>_<YYYY-MM-DD>.json". Anchoring on the trailing date (instead of
    // splitting on the first '_') makes emails containing '_' parse correctly —
    // sanitizeEmail() preserves '_' and it is legal in email local parts.
    private static readonly METRICS_FILE = /^(.+)_(\d{4}-\d{2}-\d{2})\.json$/

    constructor(baseDir: string = 'analytics') {
        this.dataDir = path.join(process.cwd(), baseDir)
        if (!fs.existsSync(this.dataDir)) {
            fs.mkdirSync(this.dataDir, { recursive: true })
        }
    }

    /**
     * Record metrics for a completed account run.
     * Overwrites any existing file for the same account and date; write
     * failures are logged rather than thrown.
     */
    recordRun(metrics: DailyMetrics): void {
        const date = metrics.date
        const email = this.sanitizeEmail(metrics.email)
        const fileName = `${email}_${date}.json`
        const filePath = path.join(this.dataDir, fileName)
        try {
            fs.writeFileSync(filePath, JSON.stringify(metrics, null, 2), 'utf-8')
        } catch (error) {
            console.error(`Failed to save metrics for ${metrics.email}:`, error)
        }
    }

    /**
     * Get aggregated history for a specific account over the last `days` days.
     * Returns an empty record (successRate 1.0, lastRunDate 'never') when no
     * data files exist for the account.
     */
    getAccountHistory(email: string, days: number = 30): AccountHistory {
        const sanitized = this.sanitizeEmail(email)
        const files = this.getAccountFiles(sanitized, days)
        if (files.length === 0) {
            return {
                email,
                totalRuns: 0,
                totalPointsEarned: 0,
                avgPointsPerDay: 0,
                avgExecutionTime: 0,
                successRate: 1.0,
                lastRunDate: 'never',
                banHistory: [],
                riskTrend: []
            }
        }
        let totalPoints = 0
        let totalTime = 0
        let successCount = 0
        const banHistory: Array<{ date: string; reason: string }> = []
        const riskScores: number[] = []
        for (const file of files) {
            const filePath = path.join(this.dataDir, file)
            try {
                const data: DailyMetrics = JSON.parse(fs.readFileSync(filePath, 'utf-8'))
                totalPoints += data.pointsEarned
                totalTime += data.executionTimeMs
                if (data.successRate > 0.5) successCount++
                if (data.banned) {
                    banHistory.push({ date: data.date, reason: 'detected' })
                }
                if (typeof data.riskScore === 'number') {
                    riskScores.push(data.riskScore)
                }
            } catch {
                continue // skip unreadable/corrupt files
            }
        }
        const totalRuns = files.length
        // Files share the same email prefix, so lexicographic sort orders them by date.
        const lastFile = files[files.length - 1]
        const lastRunDate = (lastFile && Analytics.METRICS_FILE.exec(lastFile)?.[2]) || 'unknown'
        return {
            email,
            totalRuns,
            totalPointsEarned: totalPoints,
            avgPointsPerDay: Math.round(totalPoints / Math.max(1, totalRuns)),
            avgExecutionTime: Math.round(totalTime / Math.max(1, totalRuns)),
            successRate: successCount / Math.max(1, totalRuns),
            lastRunDate,
            banHistory,
            riskTrend: riskScores.slice(-10) // last 10 risk scores
        }
    }

    /**
     * Generate a summary report for all accounts found in the data directory.
     */
    generateSummary(days: number = 30): AnalyticsSummary {
        const accountEmails = this.getAllAccounts()
        const accounts: AccountHistory[] = []
        for (const email of accountEmails) {
            accounts.push(this.getAccountHistory(email, days))
        }
        const totalPoints = accounts.reduce((sum, a) => sum + a.totalPointsEarned, 0)
        const avgSuccess = accounts.reduce((sum, a) => sum + a.successRate, 0) / Math.max(1, accounts.length)
        let mostProductive = ''
        let maxPoints = 0
        let mostRisky = ''
        let maxRisk = 0
        for (const acc of accounts) {
            if (acc.totalPointsEarned > maxPoints) {
                maxPoints = acc.totalPointsEarned
                mostProductive = acc.email
            }
            const avgRisk = acc.riskTrend.reduce((s, r) => s + r, 0) / Math.max(1, acc.riskTrend.length)
            if (avgRisk > maxRisk) {
                maxRisk = avgRisk
                mostRisky = acc.email
            }
        }
        return {
            period: `last-${days}-days`,
            accounts,
            globalStats: {
                totalPoints,
                avgSuccessRate: Number(avgSuccess.toFixed(2)),
                mostProductiveAccount: mostProductive || 'none',
                mostRiskyAccount: mostRisky || 'none'
            }
        }
    }

    /**
     * Export summary as markdown table (for human readability)
     */
    exportMarkdown(days: number = 30): string {
        const summary = this.generateSummary(days)
        const lines: string[] = []
        lines.push(`# Analytics Summary (${summary.period})`)
        lines.push('')
        lines.push('## Global Stats')
        lines.push(`- Total Points: ${summary.globalStats.totalPoints}`)
        lines.push(`- Avg Success Rate: ${(summary.globalStats.avgSuccessRate * 100).toFixed(1)}%`)
        lines.push(`- Most Productive: ${summary.globalStats.mostProductiveAccount}`)
        lines.push(`- Most Risky: ${summary.globalStats.mostRiskyAccount}`)
        lines.push('')
        lines.push('## Per-Account Breakdown')
        lines.push('')
        lines.push('| Account | Runs | Total Points | Avg/Day | Success Rate | Last Run | Bans |')
        lines.push('|---------|------|--------------|---------|--------------|----------|------|')
        for (const acc of summary.accounts) {
            const successPct = (acc.successRate * 100).toFixed(0)
            const banCount = acc.banHistory.length
            lines.push(
                `| ${acc.email} | ${acc.totalRuns} | ${acc.totalPointsEarned} | ${acc.avgPointsPerDay} | ${successPct}% | ${acc.lastRunDate} | ${banCount} |`
            )
        }
        // Optional community info footer (only in markdown exports)
        lines.push('')
        lines.push('---')
        lines.push('*Looking for faster updates, advanced features, and better anti-detection? Join our community for enhanced versions and support.*')
        return lines.join('\n')
    }

    /**
     * Clean up old analytics files (retention policy), judged by file mtime.
     */
    cleanup(retentionDays: number): void {
        const files = fs.readdirSync(this.dataDir)
        const cutoff = Date.now() - (retentionDays * 24 * 60 * 60 * 1000)
        for (const file of files) {
            if (!file.endsWith('.json')) continue
            const filePath = path.join(this.dataDir, file)
            try {
                const stats = fs.statSync(filePath)
                if (stats.mtimeMs < cutoff) {
                    fs.unlinkSync(filePath)
                }
            } catch {
                continue
            }
        }
    }

    // Replace any character not legal in our file naming scheme with '_'.
    private sanitizeEmail(email: string): string {
        return email.replace(/[^a-zA-Z0-9@._-]/g, '_')
    }

    // Files for exactly this sanitized email (not a prefix — avoids collisions
    // between e.g. "a@b.com" and "a@b.comx") whose encoded date falls within
    // the last `days` days, sorted ascending by name (i.e. by date).
    private getAccountFiles(sanitizedEmail: string, days: number): string[] {
        const cutoffDate = new Date()
        cutoffDate.setDate(cutoffDate.getDate() - days)
        return fs.readdirSync(this.dataDir)
            .filter((f: string) => {
                const m = Analytics.METRICS_FILE.exec(f)
                if (!m || m[1] !== sanitizedEmail) return false
                return new Date(m[2]!) >= cutoffDate
            })
            .sort()
    }

    // Distinct sanitized emails present in the data directory.
    private getAllAccounts(): string[] {
        const emailSet = new Set<string>()
        for (const file of fs.readdirSync(this.dataDir)) {
            const m = Analytics.METRICS_FILE.exec(file)
            if (m && m[1]) emailSet.add(m[1])
        }
        return Array.from(emailSet)
    }
}

View File

@@ -197,36 +197,6 @@ export class ConfigValidator {
}
}
// Check schedule
if (config.schedule?.enabled) {
if (!config.schedule.timeZone) {
issues.push({
severity: 'warning',
field: 'schedule.timeZone',
message: 'No timeZone specified, defaulting to UTC',
suggestion: 'Set your local timezone (e.g., America/New_York)'
})
}
const useAmPm = config.schedule.useAmPm
const time12 = (config.schedule as unknown as Record<string, unknown>)['time12']
const time24 = (config.schedule as unknown as Record<string, unknown>)['time24']
if (useAmPm === true && (!time12 || (typeof time12 === 'string' && time12.trim() === ''))) {
issues.push({
severity: 'error',
field: 'schedule.time12',
message: 'useAmPm is true but time12 is empty'
})
}
if (useAmPm === false && (!time24 || (typeof time24 === 'string' && time24.trim() === ''))) {
issues.push({
severity: 'error',
field: 'schedule.time24',
message: 'useAmPm is false but time24 is empty'
})
}
}
// Check workers
if (config.workers) {
@@ -248,26 +218,6 @@ export class ConfigValidator {
}
}
// Check diagnostics
if (config.diagnostics?.enabled) {
const maxPerRun = config.diagnostics.maxPerRun || 2
if (maxPerRun > 20) {
issues.push({
severity: 'warning',
field: 'diagnostics.maxPerRun',
message: 'Very high maxPerRun may fill disk quickly'
})
}
const retention = config.diagnostics.retentionDays || 7
if (retention > 90) {
issues.push({
severity: 'info',
field: 'diagnostics.retentionDays',
message: 'Long retention period - monitor disk usage'
})
}
}
const valid = !issues.some(i => i.severity === 'error')
return { valid, issues }

View File

@@ -1,74 +0,0 @@
import path from 'path'
import fs from 'fs'
import type { Page } from 'rebrowser-playwright'
import type { MicrosoftRewardsBot } from '../index'
// 'default' captures are config-gated and budget-limited; 'security' captures always run.
export type DiagnosticsScope = 'default' | 'security'

/** Optional knobs for captureDiagnostics. */
export interface DiagnosticsOptions {
    scope?: DiagnosticsScope
    skipSlot?: boolean // bypass the per-run capture budget check
    force?: boolean // capture even when diagnostics are disabled in config
}
/**
 * Capture a screenshot and/or HTML dump of the given page for debugging.
 * 'security' scoped captures always run and land in an isolated folder under
 * diagnostics/security-incidents/, while 'default' captures respect the
 * config toggles plus a per-run budget and are written to reports/<day>/.
 * Never throws; all failures are logged as warnings.
 */
export async function captureDiagnostics(bot: MicrosoftRewardsBot, page: Page, rawLabel: string, options?: DiagnosticsOptions): Promise<void> {
    try {
        const scope: DiagnosticsScope = options?.scope ?? 'default'
        const isSecurity = scope === 'security'
        const cfg = bot.config?.diagnostics ?? {}

        // Security captures (and explicit force) bypass the config enabled flag.
        const mustCapture = options?.force === true || isSecurity
        if (!mustCapture && cfg.enabled === false) return

        // Default-scope captures consume a limited per-run budget unless skipped.
        if (!isSecurity) {
            const maxPerRun = typeof cfg.maxPerRun === 'number' ? cfg.maxPerRun : 8
            if (!options?.skipSlot && !bot.tryReserveDiagSlot(maxPerRun)) return
        }

        const wantShot = isSecurity ? true : cfg.saveScreenshot !== false
        const wantHtml = isSecurity ? true : cfg.saveHtml !== false
        if (!wantShot && !wantHtml) return

        const safeLabel = rawLabel.replace(/[^a-z0-9-_]/gi, '_').slice(0, 64) || 'capture'
        const now = new Date()
        const pad = (v: number) => String(v).padStart(2, '0')
        const timestamp = `${pad(now.getHours())}${pad(now.getMinutes())}${pad(now.getSeconds())}`

        // Security incidents get their own timestamped folder; routine captures share a daily one.
        let dir: string
        if (isSecurity) {
            const base = path.join(process.cwd(), 'diagnostics', 'security-incidents')
            fs.mkdirSync(base, { recursive: true })
            dir = path.join(base, `${now.toISOString().replace(/[:.]/g, '-')}-${safeLabel}`)
            fs.mkdirSync(dir, { recursive: true })
        } else {
            const day = `${now.getFullYear()}-${pad(now.getMonth() + 1)}-${pad(now.getDate())}`
            dir = path.join(process.cwd(), 'reports', day)
            fs.mkdirSync(dir, { recursive: true })
        }

        if (wantShot) {
            const shotPath = path.join(dir, isSecurity ? 'page.png' : `${timestamp}_${safeLabel}.png`)
            await page.screenshot({ path: shotPath }).catch(() => {})
            if (isSecurity) {
                bot.log(bot.isMobile, 'DIAG', `Saved security screenshot to ${shotPath}`)
            } else {
                bot.log(bot.isMobile, 'DIAG', `Saved diagnostics screenshot to ${shotPath}`)
            }
        }

        if (wantHtml) {
            const htmlPath = path.join(dir, isSecurity ? 'page.html' : `${timestamp}_${safeLabel}.html`)
            try {
                const html = await page.content()
                await fs.promises.writeFile(htmlPath, html, 'utf-8')
                if (isSecurity) {
                    bot.log(bot.isMobile, 'DIAG', `Saved security HTML to ${htmlPath}`)
                }
            } catch {
                /* ignore */
            }
        }
    } catch (error) {
        bot.log(bot.isMobile, 'DIAG', `Failed to capture diagnostics: ${error instanceof Error ? error.message : error}`, 'warn')
    }
}

View File

@@ -123,10 +123,6 @@ function normalizeConfig(raw: unknown): Config {
const conclusionWebhook = notifications.conclusionWebhook ?? n.conclusionWebhook ?? { enabled: false, url: '' }
const ntfy = notifications.ntfy ?? n.ntfy ?? { enabled: false, url: '', topic: '', authToken: '' }
// Buy Mode
const buyMode = n.buyMode ?? {}
const buyModeEnabled = typeof buyMode.enabled === 'boolean' ? buyMode.enabled : false
const buyModeMax = typeof buyMode.maxMinutes === 'number' ? buyMode.maxMinutes : 45
// Fingerprinting
const saveFingerprint = (n.fingerprinting?.saveFingerprint ?? n.saveFingerprint) ?? { mobile: false, desktop: false }
@@ -180,12 +176,8 @@ function normalizeConfig(raw: unknown): Config {
webhook,
conclusionWebhook,
ntfy,
diagnostics: n.diagnostics,
update: n.update,
schedule: n.schedule,
passesPerRun: passesPerRun,
vacation: n.vacation,
buyMode: { enabled: buyModeEnabled, maxMinutes: buyModeMax },
crashRecovery: n.crashRecovery || {}
}
@@ -376,4 +368,4 @@ export async function saveFingerprintData(sessionPath: string, email: string, is
} catch (error) {
throw new Error(error as string)
}
}
}