Initial commit

This commit is contained in:
2025-11-01 20:44:31 +01:00
commit 6d549e6590
91 changed files with 20404 additions and 0 deletions

View File

@@ -0,0 +1,25 @@
/**
 * Sliding-window success/error tracker that converts the recent error ratio
 * into a wait-time multiplier for adaptive throttling.
 */
export class AdaptiveThrottler {
  private failures = 0
  private successes = 0
  // Most recent outcomes, oldest first; counters always mirror its contents.
  private samples: Array<{ ok: boolean; at: number }> = []
  private readonly capacity = 50

  /** Record one operation outcome, evicting the oldest sample past the window cap. */
  record(ok: boolean) {
    this.samples.push({ ok, at: Date.now() })
    if (ok) {
      this.successes += 1
    } else {
      this.failures += 1
    }
    while (this.samples.length > this.capacity) {
      const evicted = this.samples.shift()
      if (evicted) {
        if (evicted.ok) this.successes -= 1
        else this.failures -= 1
      }
    }
  }

  /**
   * Return a multiplier to apply to waits (1 = normal).
   * 0% errors -> 1x; 50% errors -> 2x; 75%+ errors -> 2.5x (cap).
   */
  getDelayMultiplier(): number {
    const sampleCount = Math.max(1, this.successes + this.failures)
    const failureRatio = this.failures / sampleCount
    const raw = 1 + Math.min(1.5, failureRatio * 2)
    return Number(raw.toFixed(2))
  }
}

264
src/util/Analytics.ts Normal file
View File

@@ -0,0 +1,264 @@
import fs from 'fs'
import path from 'path'
/** Metrics captured for a single account run on a single day. */
export interface DailyMetrics {
  date: string // YYYY-MM-DD
  email: string
  pointsEarned: number
  pointsInitial: number
  pointsEnd: number
  desktopPoints: number
  mobilePoints: number
  executionTimeMs: number
  successRate: number // 0-1
  errorsCount: number
  banned: boolean
  riskScore?: number // optional: only present when a risk assessment ran
}

/** Aggregated view of one account across many stored runs. */
export interface AccountHistory {
  email: string
  totalRuns: number
  totalPointsEarned: number
  avgPointsPerDay: number
  avgExecutionTime: number // milliseconds, averaged over runs
  successRate: number // 0-1, fraction of runs with per-run successRate > 0.5
  lastRunDate: string // YYYY-MM-DD, or 'never' when no data / 'unknown' when unparsable
  banHistory: Array<{ date: string; reason: string }>
  riskTrend: number[] // last N risk scores
}

/** Cross-account report produced by Analytics.generateSummary(). */
export interface AnalyticsSummary {
  period: string // e.g., 'last-7-days', 'last-30-days', 'all-time'
  accounts: AccountHistory[]
  globalStats: {
    totalPoints: number
    avgSuccessRate: number
    mostProductiveAccount: string
    mostRiskyAccount: string
  }
}
/**
* Analytics tracks performance metrics, point collection trends, and account health.
* Stores data in JSON files for lightweight persistence and easy analysis.
*/
/**
 * Analytics tracks performance metrics, point collection trends, and account health.
 * Stores data in JSON files for lightweight persistence and easy analysis.
 *
 * Files are named "<sanitizedEmail>_<YYYY-MM-DD>.json". A sanitized email may
 * itself contain underscores (sanitizeEmail keeps '_' and substitutes '_' for
 * disallowed characters), so filenames are always split at the LAST underscore
 * via parseFileName — splitting at the first one mis-attributes files.
 */
export class Analytics {
  private dataDir: string

  /** @param baseDir Directory (relative to cwd) holding metric files; created if missing. */
  constructor(baseDir: string = 'analytics') {
    this.dataDir = path.join(process.cwd(), baseDir)
    if (!fs.existsSync(this.dataDir)) {
      fs.mkdirSync(this.dataDir, { recursive: true })
    }
  }

  /**
   * Record metrics for a completed account run.
   * Overwrites any existing file for the same account+date; write failures are
   * logged rather than thrown so analytics never aborts a run.
   */
  recordRun(metrics: DailyMetrics): void {
    const date = metrics.date
    const email = this.sanitizeEmail(metrics.email)
    const fileName = `${email}_${date}.json`
    const filePath = path.join(this.dataDir, fileName)
    try {
      fs.writeFileSync(filePath, JSON.stringify(metrics, null, 2), 'utf-8')
    } catch (error) {
      console.error(`Failed to save metrics for ${metrics.email}:`, error)
    }
  }

  /**
   * Get aggregated history for a specific account over the last `days` days.
   * Returns a neutral default (successRate 1.0, lastRunDate 'never') when no
   * metric files exist for the account.
   */
  getAccountHistory(email: string, days: number = 30): AccountHistory {
    const sanitized = this.sanitizeEmail(email)
    const files = this.getAccountFiles(sanitized, days)
    if (files.length === 0) {
      return {
        email,
        totalRuns: 0,
        totalPointsEarned: 0,
        avgPointsPerDay: 0,
        avgExecutionTime: 0,
        successRate: 1.0,
        lastRunDate: 'never',
        banHistory: [],
        riskTrend: []
      }
    }
    let totalPoints = 0
    let totalTime = 0
    let successCount = 0
    const banHistory: Array<{ date: string; reason: string }> = []
    const riskScores: number[] = []
    for (const file of files) {
      const filePath = path.join(this.dataDir, file)
      try {
        const data: DailyMetrics = JSON.parse(fs.readFileSync(filePath, 'utf-8'))
        totalPoints += data.pointsEarned
        totalTime += data.executionTimeMs
        // A run counts as "successful" when more than half its operations succeeded
        if (data.successRate > 0.5) successCount++
        if (data.banned) {
          banHistory.push({ date: data.date, reason: 'detected' })
        }
        if (typeof data.riskScore === 'number') {
          riskScores.push(data.riskScore)
        }
      } catch {
        // Skip unreadable/corrupt metric files rather than failing the whole report
        continue
      }
    }
    const totalRuns = files.length
    const lastFile = files[files.length - 1]
    // files is sorted lexicographically; for a fixed email prefix the
    // YYYY-MM-DD suffix sorts chronologically, so the last file is the newest.
    const lastRunDate = lastFile ? this.parseFileName(lastFile)?.date || 'unknown' : 'unknown'
    return {
      email,
      totalRuns,
      totalPointsEarned: totalPoints,
      avgPointsPerDay: Math.round(totalPoints / Math.max(1, totalRuns)),
      avgExecutionTime: Math.round(totalTime / Math.max(1, totalRuns)),
      successRate: successCount / Math.max(1, totalRuns),
      lastRunDate,
      banHistory,
      riskTrend: riskScores.slice(-10) // last 10 risk scores
    }
  }

  /**
   * Generate a summary report for all accounts over the last `days` days.
   */
  generateSummary(days: number = 30): AnalyticsSummary {
    const accountEmails = this.getAllAccounts()
    const accounts: AccountHistory[] = []
    for (const email of accountEmails) {
      accounts.push(this.getAccountHistory(email, days))
    }
    const totalPoints = accounts.reduce((sum, a) => sum + a.totalPointsEarned, 0)
    const avgSuccess = accounts.reduce((sum, a) => sum + a.successRate, 0) / Math.max(1, accounts.length)
    let mostProductive = ''
    let maxPoints = 0
    let mostRisky = ''
    let maxRisk = 0
    for (const acc of accounts) {
      if (acc.totalPointsEarned > maxPoints) {
        maxPoints = acc.totalPointsEarned
        mostProductive = acc.email
      }
      const avgRisk = acc.riskTrend.reduce((s, r) => s + r, 0) / Math.max(1, acc.riskTrend.length)
      if (avgRisk > maxRisk) {
        maxRisk = avgRisk
        mostRisky = acc.email
      }
    }
    return {
      period: `last-${days}-days`,
      accounts,
      globalStats: {
        totalPoints,
        avgSuccessRate: Number(avgSuccess.toFixed(2)),
        mostProductiveAccount: mostProductive || 'none',
        mostRiskyAccount: mostRisky || 'none'
      }
    }
  }

  /**
   * Export summary as markdown table (for human readability)
   */
  exportMarkdown(days: number = 30): string {
    const summary = this.generateSummary(days)
    const lines: string[] = []
    lines.push(`# Analytics Summary (${summary.period})`)
    lines.push('')
    lines.push('## Global Stats')
    lines.push(`- Total Points: ${summary.globalStats.totalPoints}`)
    lines.push(`- Avg Success Rate: ${(summary.globalStats.avgSuccessRate * 100).toFixed(1)}%`)
    lines.push(`- Most Productive: ${summary.globalStats.mostProductiveAccount}`)
    lines.push(`- Most Risky: ${summary.globalStats.mostRiskyAccount}`)
    lines.push('')
    lines.push('## Per-Account Breakdown')
    lines.push('')
    lines.push('| Account | Runs | Total Points | Avg/Day | Success Rate | Last Run | Bans |')
    lines.push('|---------|------|--------------|---------|--------------|----------|------|')
    for (const acc of summary.accounts) {
      const successPct = (acc.successRate * 100).toFixed(0)
      const banCount = acc.banHistory.length
      lines.push(
        `| ${acc.email} | ${acc.totalRuns} | ${acc.totalPointsEarned} | ${acc.avgPointsPerDay} | ${successPct}% | ${acc.lastRunDate} | ${banCount} |`
      )
    }
    return lines.join('\n')
  }

  /**
   * Clean up old analytics files (retention policy).
   * Deletes .json files whose mtime is older than `retentionDays` days.
   */
  cleanup(retentionDays: number): void {
    const files = fs.readdirSync(this.dataDir)
    const cutoff = Date.now() - (retentionDays * 24 * 60 * 60 * 1000)
    for (const file of files) {
      if (!file.endsWith('.json')) continue
      const filePath = path.join(this.dataDir, file)
      try {
        const stats = fs.statSync(filePath)
        if (stats.mtimeMs < cutoff) {
          fs.unlinkSync(filePath)
        }
      } catch {
        // A file may disappear or be unreadable mid-scan; skip it
        continue
      }
    }
  }

  /** Make an email safe for use in a filename (keeps alphanumerics and @ . _ -). */
  private sanitizeEmail(email: string): string {
    return email.replace(/[^a-zA-Z0-9@._-]/g, '_')
  }

  /**
   * Split "<email>_<date>.json" into its parts at the LAST underscore.
   * Returns null for non-.json files or names without a usable separator.
   * Splitting at the last underscore is required because sanitized emails may
   * contain underscores, while the date suffix (YYYY-MM-DD) never does.
   */
  private parseFileName(file: string): { email: string; date: string } | null {
    if (!file.endsWith('.json')) return null
    const stem = file.slice(0, -'.json'.length)
    const sep = stem.lastIndexOf('_')
    if (sep <= 0) return null
    return { email: stem.slice(0, sep), date: stem.slice(sep + 1) }
  }

  /**
   * List this account's metric files from the last `days` days, sorted by name
   * (and therefore by date, since the email prefix is constant).
   * Matches the sanitized email exactly — a prefix match (the previous
   * behavior) would also pick up other accounts sharing the prefix.
   */
  private getAccountFiles(sanitizedEmail: string, days: number): string[] {
    const files = fs.readdirSync(this.dataDir)
    const cutoffDate = new Date()
    cutoffDate.setDate(cutoffDate.getDate() - days)
    return files
      .filter((f: string) => {
        const parsed = this.parseFileName(f)
        if (!parsed || parsed.email !== sanitizedEmail) return false
        const fileDate = new Date(parsed.date)
        return !Number.isNaN(fileDate.getTime()) && fileDate >= cutoffDate
      })
      .sort()
  }

  /** Collect the distinct (sanitized) account emails that have metric files. */
  private getAllAccounts(): string[] {
    const emailSet = new Set<string>()
    for (const file of fs.readdirSync(this.dataDir)) {
      const parsed = this.parseFileName(file)
      if (parsed) emailSet.add(parsed.email)
    }
    return Array.from(emailSet)
  }
}

141
src/util/Axios.ts Normal file
View File

@@ -0,0 +1,141 @@
import axios, { AxiosError, AxiosInstance, AxiosRequestConfig, AxiosResponse } from 'axios'
import { HttpProxyAgent } from 'http-proxy-agent'
import { HttpsProxyAgent } from 'https-proxy-agent'
import { SocksProxyAgent } from 'socks-proxy-agent'
import { AccountProxy } from '../interface/Account'
/**
 * Axios wrapper that routes requests through an account's configured proxy
 * (HTTP, HTTPS, or SOCKS) and retries transient proxy/network failures,
 * falling back to a direct connection as a last resort.
 */
class AxiosClient {
  private instance: AxiosInstance
  private account: AccountProxy

  constructor(account: AccountProxy) {
    this.account = account
    this.instance = axios.create()
    // If a proxy configuration is provided, set up the agent
    if (this.account.url && this.account.proxyAxios) {
      const agent = this.getAgentForProxy(this.account)
      this.instance.defaults.httpAgent = agent
      this.instance.defaults.httpsAgent = agent
    }
  }

  /**
   * Build the proxy agent matching the configured protocol.
   * @throws Error when the protocol is not one of http/https/socks/socks4/socks5.
   */
  private getAgentForProxy(proxyConfig: AccountProxy): HttpProxyAgent<string> | HttpsProxyAgent<string> | SocksProxyAgent {
    const { proxyUrl, protocol } = this.buildProxyUrl(proxyConfig)
    // buildProxyUrl returns the raw URL.protocol (e.g. "http:"), so strip the trailing colon
    const normalized = protocol.replace(/:$/, '')
    switch (normalized) {
      case 'http':
        return new HttpProxyAgent(proxyUrl)
      case 'https':
        return new HttpsProxyAgent(proxyUrl)
      case 'socks':
      case 'socks4':
      case 'socks5':
        return new SocksProxyAgent(proxyUrl)
      default:
        throw new Error(`Unsupported proxy protocol in "${proxyConfig.url}". Supported: http://, https://, socks://, socks4://, socks5://`)
    }
  }

  /**
   * Normalize the account's proxy settings into a complete proxy URL:
   * defaults the scheme to http://, validates the protocol, and injects the
   * port and percent-encoded credentials when provided separately.
   * @throws Error when the URL is missing/unparsable, the protocol is
   *         unsupported, or no port can be determined.
   */
  private buildProxyUrl(proxyConfig: AccountProxy): { proxyUrl: string; protocol: string } {
    const { url, port, username, password } = proxyConfig
    if (!url) {
      throw new Error('Proxy URL is required when proxyAxios is enabled.')
    }
    const hasScheme = /^[a-zA-Z][a-zA-Z0-9+.-]*:/.test(url)
    const candidate = hasScheme ? url : `http://${url}`
    let parsedUrl: URL
    try {
      parsedUrl = new URL(candidate)
    } catch (err) {
      throw new Error(`Invalid proxy URL "${url}": ${(err as Error).message}`)
    }
    const protocol = parsedUrl.protocol.replace(/:$/, '')
    const allowed = new Set(['http', 'https', 'socks', 'socks4', 'socks5'])
    if (!allowed.has(protocol)) {
      throw new Error(`Unsupported proxy protocol in "${url}". Supported: http://, https://, socks://, socks4://, socks5://`)
    }
    if (!parsedUrl.port) {
      if (port) {
        parsedUrl.port = String(port)
      } else {
        throw new Error(`Proxy port missing for "${url}". Provide a port value.`)
      }
    }
    if (username) {
      parsedUrl.username = encodeURIComponent(username)
    }
    if (password) {
      parsedUrl.password = encodeURIComponent(password)
    }
    return { proxyUrl: parsedUrl.toString(), protocol: parsedUrl.protocol }
  }

  /**
   * Make an Axios request through the configured proxy.
   * Transient network/proxy errors are retried with exponential backoff; on a
   * proxy auth failure (HTTP 407) or once retries are exhausted, the request
   * is reissued through a fresh, proxyless axios instance.
   * @param bypassProxy when true, skip the proxy entirely for this request.
   */
  public async request(config: AxiosRequestConfig, bypassProxy = false): Promise<AxiosResponse> {
    if (bypassProxy) {
      const bypassInstance = axios.create()
      return bypassInstance.request(config)
    }
    let lastError: unknown
    const maxAttempts = 2
    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
      try {
        return await this.instance.request(config)
      } catch (err: unknown) {
        lastError = err
        const axiosErr = err as AxiosError | undefined
        // HTTP 407: proxy authentication failed. Retrying through the same
        // proxy cannot succeed, so fall back to a direct request immediately.
        // (Previously this slept before the unconditional fallback, which only
        // added pointless delay.)
        if (axiosErr && axiosErr.response && axiosErr.response.status === 407) {
          const bypassInstance = axios.create()
          return bypassInstance.request(config)
        }
        // If the proxied request fails with common proxy/network errors, retry with backoff
        const e = err as { code?: string; cause?: { code?: string }; message?: string } | undefined
        const code = e?.code || e?.cause?.code
        const isNetErr = code === 'ECONNREFUSED' || code === 'ETIMEDOUT' || code === 'ECONNRESET' || code === 'ENOTFOUND'
        const msg = String(e?.message || '')
        const looksLikeProxyIssue = /proxy|tunnel|socks|agent/i.test(msg)
        if (isNetErr || looksLikeProxyIssue) {
          if (attempt < maxAttempts) {
            // Exponential backoff: 1s, 2s, 4s, ...
            const delayMs = 1000 * Math.pow(2, attempt - 1)
            await this.sleep(delayMs)
            continue
          }
          // Last attempt: try without proxy
          const bypassInstance = axios.create()
          return bypassInstance.request(config)
        }
        // Non-retryable error
        throw err
      }
    }
    throw lastError
  }

  /** Promise-based delay helper used for retry backoff. */
  private sleep(ms: number): Promise<void> {
    return new Promise(resolve => setTimeout(resolve, ms))
  }
}

export default AxiosClient

16
src/util/BanDetector.ts Normal file
View File

@@ -0,0 +1,16 @@
/** Result of a ban-signal scan: `status` is true when a known pattern matched. */
export type BanStatus = { status: boolean; reason: string }

// Known ban/lock phrases, checked in order; the first match wins.
const BAN_PATTERNS: Array<{ re: RegExp; reason: string }> = [
  { re: /suspend|suspended|suspension/i, reason: 'account suspended' },
  { re: /locked|lockout|serviceabuse|abuse/i, reason: 'locked or service abuse detected' },
  { re: /unusual.*activity|unusual activity/i, reason: 'unusual activity prompts' },
  { re: /verify.*identity|identity.*verification/i, reason: 'identity verification required' }
]

/**
 * Scan an arbitrary value for known ban indicators.
 * Errors contribute their message; any other value is stringified
 * (null/undefined/falsy values become the empty string and never match).
 */
export function detectBanReason(input: unknown): BanStatus {
  const text = input instanceof Error ? (input.message || '') : String(input || '')
  const hit = BAN_PATTERNS.find(pattern => pattern.re.test(text))
  return hit ? { status: true, reason: hit.reason } : { status: false, reason: '' }
}

394
src/util/BanPredictor.ts Normal file
View File

@@ -0,0 +1,394 @@
import { RiskManager, RiskEvent } from './RiskManager'
/** A named risk pattern with its detection state and supporting evidence. */
export interface BanPattern {
  name: string
  description: string
  weight: number // 0-10; added to the risk score when the pattern is detected
  detected: boolean
  evidence: string[] // human-readable facts supporting the detection
}

/** Output of BanPredictor.predictBanRisk(). */
export interface BanPrediction {
  riskScore: number // 0-100
  confidence: number // 0-1
  likelihood: 'very-low' | 'low' | 'medium' | 'high' | 'critical'
  patterns: BanPattern[] // only the patterns that were actually detected
  recommendation: string
  preventiveActions: string[]
}

/** One learning sample: the state of an account at a ban (or clean-run) observation. */
export interface HistoricalData {
  email: string
  timestamp: number
  banned: boolean
  preBanEvents: RiskEvent[] // events recorded before the ban; empty for success samples
  accountAge: number // days since first use
  totalRuns: number
}
/**
* BanPredictor uses machine-learning-style pattern analysis to predict ban risk.
* Learns from historical data and real-time signals to calculate ban probability.
*/
/**
 * BanPredictor uses machine-learning-style pattern analysis to predict ban risk.
 * Learns from historical data and real-time signals to calculate ban probability.
 */
export class BanPredictor {
  private riskManager: RiskManager
  private history: HistoricalData[] = []
  private patterns: BanPattern[] = []

  constructor(riskManager: RiskManager) {
    this.riskManager = riskManager
    this.initializePatterns()
  }

  /**
   * Analyze current state and predict ban risk.
   * Combines the RiskManager's base score with feature weights, detected
   * pattern penalties, and a historical adjustment, capped at 100.
   */
  predictBanRisk(accountEmail: string, accountAgeDays: number, totalRuns: number): BanPrediction {
    const riskMetrics = this.riskManager.assessRisk()
    const recentEvents = this.riskManager.getRecentEvents(60)
    // Detect patterns
    this.detectPatterns(recentEvents, accountAgeDays, totalRuns)
    // Calculate base risk from RiskManager
    const baseRisk = riskMetrics.score
    // Apply ML-style feature weights
    const featureScore = this.calculateFeatureScore(recentEvents, accountAgeDays, totalRuns)
    // Pattern detection bonus
    const detectedPatterns = this.patterns.filter(p => p.detected)
    const patternPenalty = detectedPatterns.reduce((sum, p) => sum + p.weight, 0)
    // Historical learning adjustment
    const historicalAdjustment = this.getHistoricalAdjustment(accountEmail)
    // Final risk score (capped at 100)
    const finalScore = Math.min(100, baseRisk + featureScore + patternPenalty + historicalAdjustment)
    // Calculate confidence (based on data availability)
    const confidence = this.calculateConfidence(recentEvents.length, this.history.length)
    // Determine likelihood tier
    let likelihood: BanPrediction['likelihood']
    if (finalScore < 20) likelihood = 'very-low'
    else if (finalScore < 40) likelihood = 'low'
    else if (finalScore < 60) likelihood = 'medium'
    else if (finalScore < 80) likelihood = 'high'
    else likelihood = 'critical'
    // Generate recommendations
    const recommendation = this.generateRecommendation(finalScore)
    const preventiveActions = this.generatePreventiveActions(detectedPatterns)
    return {
      riskScore: Math.round(finalScore),
      confidence: Number(confidence.toFixed(2)),
      likelihood,
      patterns: detectedPatterns,
      recommendation,
      preventiveActions
    }
  }

  /**
   * Record ban event for learning (captures the last 120 min of events as context).
   */
  recordBan(email: string, accountAgeDays: number, totalRuns: number): void {
    const preBanEvents = this.riskManager.getRecentEvents(120)
    this.history.push({
      email,
      timestamp: Date.now(),
      banned: true,
      preBanEvents,
      accountAge: accountAgeDays,
      totalRuns
    })
    // Keep history limited (last 100 entries)
    if (this.history.length > 100) {
      this.history.shift()
    }
  }

  /**
   * Record successful run (no ban) for learning
   */
  recordSuccess(email: string, accountAgeDays: number, totalRuns: number): void {
    this.history.push({
      email,
      timestamp: Date.now(),
      banned: false,
      preBanEvents: [],
      accountAge: accountAgeDays,
      totalRuns
    })
    if (this.history.length > 100) {
      this.history.shift()
    }
  }

  /**
   * Initialize known ban patterns (all start undetected; detectPatterns flips them).
   */
  private initializePatterns(): void {
    this.patterns = [
      {
        name: 'rapid-captcha-sequence',
        description: 'Multiple captchas in short timespan',
        weight: 8,
        detected: false,
        evidence: []
      },
      {
        name: 'high-error-rate',
        description: 'Excessive errors (>50% in last hour)',
        weight: 6,
        detected: false,
        evidence: []
      },
      {
        name: 'timeout-storm',
        description: 'Many consecutive timeouts',
        weight: 7,
        detected: false,
        evidence: []
      },
      {
        name: 'suspicious-timing',
        description: 'Activity at unusual hours or too consistent',
        weight: 5,
        detected: false,
        evidence: []
      },
      {
        name: 'new-account-aggressive',
        description: 'Aggressive activity on young account',
        weight: 9,
        detected: false,
        evidence: []
      },
      {
        name: 'proxy-flagged',
        description: 'Proxy showing signs of blacklisting',
        weight: 7,
        detected: false,
        evidence: []
      }
    ]
  }

  /**
   * Detect patterns in recent events.
   * Assumes `events` is in chronological order as returned by
   * RiskManager.getRecentEvents — TODO confirm against RiskManager.
   */
  private detectPatterns(events: RiskEvent[], accountAgeDays: number, totalRuns: number): void {
    // Reset detection
    for (const p of this.patterns) {
      p.detected = false
      p.evidence = []
    }
    const captchaEvents = events.filter(e => e.type === 'captcha')
    const errorEvents = events.filter(e => e.type === 'error')
    const timeoutEvents = events.filter(e => e.type === 'timeout')
    // Pattern 1: Rapid captcha sequence.
    // Measure the span across the captcha events themselves — the previous
    // version measured the span of ALL recent events, so unrelated older
    // events could stretch the span past 30 min and mask a rapid captcha burst.
    if (captchaEvents.length >= 3) {
      const timeSpan = (captchaEvents[captchaEvents.length - 1]?.timestamp || 0) - (captchaEvents[0]?.timestamp || 0)
      if (timeSpan < 1800000) { // 30 min
        const p = this.patterns.find(pat => pat.name === 'rapid-captcha-sequence')
        if (p) {
          p.detected = true
          p.evidence.push(`${captchaEvents.length} captchas in ${Math.round(timeSpan / 60000)}min`)
        }
      }
    }
    // Pattern 2: High error rate
    const errorRate = errorEvents.length / Math.max(1, events.length)
    if (errorRate > 0.5) {
      const p = this.patterns.find(pat => pat.name === 'high-error-rate')
      if (p) {
        p.detected = true
        p.evidence.push(`Error rate: ${(errorRate * 100).toFixed(1)}%`)
      }
    }
    // Pattern 3: Timeout storm
    if (timeoutEvents.length >= 5) {
      const p = this.patterns.find(pat => pat.name === 'timeout-storm')
      if (p) {
        p.detected = true
        p.evidence.push(`${timeoutEvents.length} timeouts detected`)
      }
    }
    // Pattern 4: Suspicious timing (all events within same hour of day)
    if (events.length > 5) {
      const hours = new Set(events.map(e => new Date(e.timestamp).getHours()))
      if (hours.size === 1) {
        const p = this.patterns.find(pat => pat.name === 'suspicious-timing')
        if (p) {
          p.detected = true
          p.evidence.push('All activity in same hour of day')
        }
      }
    }
    // Pattern 5: New account aggressive
    if (accountAgeDays < 7 && totalRuns > 10) {
      const p = this.patterns.find(pat => pat.name === 'new-account-aggressive')
      if (p) {
        p.detected = true
        p.evidence.push(`Account ${accountAgeDays} days old with ${totalRuns} runs`)
      }
    }
    // Pattern 6: Proxy flagged (heuristic: many ban hints)
    const banHints = events.filter(e => e.type === 'ban_hint')
    if (banHints.length >= 2) {
      const p = this.patterns.find(pat => pat.name === 'proxy-flagged')
      if (p) {
        p.detected = true
        p.evidence.push(`${banHints.length} ban hints detected`)
      }
    }
  }

  /**
   * Calculate feature-based risk score (ML-style).
   * Additive contributions from event density, account age, run frequency,
   * and the count of high-severity events.
   */
  private calculateFeatureScore(events: RiskEvent[], accountAgeDays: number, totalRuns: number): number {
    let score = 0
    // Feature 1: Event density (events per minute over the 60-min window)
    const eventDensity = events.length / 60
    if (eventDensity > 0.5) score += 10
    else if (eventDensity > 0.2) score += 5
    // Feature 2: Account age risk (younger accounts are riskier)
    if (accountAgeDays < 3) score += 15
    else if (accountAgeDays < 7) score += 10
    else if (accountAgeDays < 14) score += 5
    // Feature 3: Run frequency risk
    const runsPerDay = totalRuns / Math.max(1, accountAgeDays)
    if (runsPerDay > 3) score += 12
    else if (runsPerDay > 2) score += 6
    // Feature 4: Severity distribution
    const highSeverityEvents = events.filter(e => e.severity >= 7)
    if (highSeverityEvents.length > 3) score += 15
    else if (highSeverityEvents.length > 1) score += 8
    return score
  }

  /**
   * Learn from historical data: penalize accounts with prior bans,
   * reward accounts with a sufficiently long clean history.
   */
  private getHistoricalAdjustment(email: string): number {
    const accountHistory = this.history.filter(h => h.email === email)
    if (accountHistory.length === 0) return 0
    const bannedCount = accountHistory.filter(h => h.banned).length
    const banRate = bannedCount / accountHistory.length
    // If this account has high ban history, increase risk
    if (banRate > 0.3) return 20
    if (banRate > 0.1) return 10
    // If clean history, slight bonus
    if (accountHistory.length > 5 && banRate === 0) return -5
    return 0
  }

  /**
   * Calculate prediction confidence: 0.5 baseline, boosted by the amount of
   * recent event data and accumulated history, capped at 1.0.
   */
  private calculateConfidence(eventCount: number, historyCount: number): number {
    let confidence = 0.5
    // More events = higher confidence
    if (eventCount > 20) confidence += 0.2
    else if (eventCount > 10) confidence += 0.1
    // More historical data = higher confidence
    if (historyCount > 50) confidence += 0.2
    else if (historyCount > 20) confidence += 0.1
    return Math.min(1.0, confidence)
  }

  /**
   * Generate human-readable recommendation for a given final risk score.
   */
  private generateRecommendation(score: number): string {
    if (score < 20) {
      return 'Safe to proceed. Risk is minimal.'
    } else if (score < 40) {
      return 'Low risk detected. Monitor for issues but safe to continue.'
    } else if (score < 60) {
      return 'Moderate risk. Consider increasing delays and reviewing patterns.'
    } else if (score < 80) {
      return 'High risk! Strongly recommend pausing automation for 24-48 hours.'
    } else {
      return 'CRITICAL RISK! Stop all automation immediately. Manual review required.'
    }
  }

  /**
   * Generate actionable preventive steps for each detected pattern.
   */
  private generatePreventiveActions(patterns: BanPattern[]): string[] {
    const actions: string[] = []
    if (patterns.some(p => p.name === 'rapid-captcha-sequence')) {
      actions.push('Increase search delays to 3-5 minutes minimum')
      actions.push('Enable longer cool-down periods between activities')
    }
    if (patterns.some(p => p.name === 'high-error-rate')) {
      actions.push('Check proxy connectivity and health')
      actions.push('Verify User-Agent and fingerprint configuration')
    }
    if (patterns.some(p => p.name === 'new-account-aggressive')) {
      actions.push('Slow down activity on new accounts (max 1 run per day for first week)')
      actions.push('Allow account to age naturally before heavy automation')
    }
    if (patterns.some(p => p.name === 'proxy-flagged')) {
      actions.push('Rotate to different proxy immediately')
      actions.push('Test proxy manually before resuming')
    }
    if (patterns.some(p => p.name === 'suspicious-timing')) {
      actions.push('Randomize execution times across different hours')
      actions.push('Enable humanization.allowedWindows with varied schedules')
    }
    if (actions.length === 0) {
      actions.push('Continue monitoring but no immediate action needed')
    }
    return actions
  }

  /**
   * Export historical data for analysis (returns a defensive copy).
   */
  exportHistory(): HistoricalData[] {
    return [...this.history]
  }

  /**
   * Import historical data (for persistence)
   */
  importHistory(data: HistoricalData[]): void {
    this.history = data.slice(-100) // Keep last 100
  }
}

View File

@@ -0,0 +1,112 @@
import axios from 'axios'
import { Config } from '../interface/Config'
import { Ntfy } from './Ntfy'
import { log } from './Logger'
/** One name/value row inside a Discord embed. */
interface DiscordField {
  name: string
  value: string
  inline?: boolean // Discord renders consecutive inline fields side-by-side
}

/** Subset of Discord's embed object used by this notifier. */
interface DiscordEmbed {
  title?: string
  description?: string
  color?: number // decimal RGB, e.g. 0x0078D4
  fields?: DiscordField[]
  timestamp?: string // ISO-8601 string
  thumbnail?: {
    url: string
  }
  footer?: {
    text: string
    icon_url?: string
  }
}

/** Top-level body POSTed to the Discord webhook endpoint. */
interface WebhookPayload {
  username: string
  avatar_url: string
  embeds: DiscordEmbed[]
}
/**
* Send a clean, structured Discord webhook notification
*/
/**
 * Send a clean, structured Discord webhook notification.
 * Posts the embed to every enabled webhook URL (deduplicated, with retries),
 * then optionally mirrors the message to NTFY.
 */
export async function ConclusionWebhook(
  config: Config,
  title: string,
  description: string,
  fields?: DiscordField[],
  color?: number
) {
  const conclusionTarget = config.conclusionWebhook?.enabled && config.conclusionWebhook.url
  const liveTarget = config.webhook?.enabled && config.webhook.url
  if (!conclusionTarget && !liveTarget) return

  // Build the embed; `|| default` (not ??) matches historical behavior for color 0.
  const embed: DiscordEmbed = {
    title,
    description,
    color: color || 0x0078D4,
    timestamp: new Date().toISOString(),
    thumbnail: {
      url: 'https://media.discordapp.net/attachments/1430643658788438144/1430644205344133290/rewi-v1.png'
    }
  }
  if (fields && fields.length > 0) {
    embed.fields = fields
  }

  const payload: WebhookPayload = {
    username: 'MS Rewi 🎮',
    avatar_url: 'https://media.discordapp.net/attachments/1430643658788438144/1430644205344133290/rewi-v1.png',
    embeds: [embed]
  }

  // POST with up to 3 attempts and exponential backoff (1s, 2s); log the outcome.
  const deliver = async (url: string, label: string) => {
    const maxAttempts = 3
    let lastError: unknown = null
    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
      try {
        await axios.post(url, payload, {
          headers: { 'Content-Type': 'application/json' },
          timeout: 15000
        })
        log('main', 'WEBHOOK', `${label} notification sent successfully (attempt ${attempt})`)
        return
      } catch (error) {
        lastError = error
        if (attempt < maxAttempts) {
          const delayMs = 1000 * Math.pow(2, attempt - 1)
          await new Promise(resolve => setTimeout(resolve, delayMs))
        }
      }
    }
    log('main', 'WEBHOOK', `${label} failed after ${maxAttempts} attempts: ${lastError instanceof Error ? lastError.message : String(lastError)}`, 'error')
  }

  // Deduplicate URLs (Set keeps insertion order: conclusion first, then live webhook).
  const targets = new Set<string>()
  if (conclusionTarget) targets.add(config.conclusionWebhook!.url)
  if (liveTarget) targets.add(config.webhook!.url)
  await Promise.all(
    Array.from(targets).map((url, index) => deliver(url, `webhook-${index + 1}`))
  )

  // Optional NTFY notification
  if (config.ntfy?.enabled && config.ntfy.url && config.ntfy.topic) {
    let message = `${title}\n${description}`
    if (fields) {
      message += '\n\n' + fields.map(f => `${f.name}: ${f.value}`).join('\n')
    }
    let ntfyType: 'error' | 'warn' | 'log' = 'log'
    if (color === 0xFF0000) ntfyType = 'error'
    else if (color === 0xFFAA00) ntfyType = 'warn'
    try {
      await Ntfy(message, ntfyType)
      log('main', 'NTFY', 'Notification sent successfully')
    } catch (error) {
      log('main', 'NTFY', `Failed to send notification: ${error instanceof Error ? error.message : String(error)}`, 'error')
    }
  }
}

532
src/util/ConfigValidator.ts Normal file
View File

@@ -0,0 +1,532 @@
import fs from 'fs'
import { Config } from '../interface/Config'
import { Account } from '../interface/Account'
/** A single finding from config/account validation. */
export interface ValidationIssue {
  severity: 'error' | 'warning' | 'info' // 'error' makes the result invalid; others are advisory
  field: string // dotted path of the offending setting, e.g. 'searchSettings.searchDelay'
  message: string
  suggestion?: string // optional remediation hint
}

/** Outcome of a validation pass; valid is false when any issue has severity 'error'. */
export interface ValidationResult {
  valid: boolean
  issues: ValidationIssue[]
}
/**
* ConfigValidator performs intelligent validation of config.jsonc and accounts.json
* before execution to catch common mistakes, conflicts, and security issues.
*/
export class ConfigValidator {
/**
* Validate the main config file
*/
/**
 * Validate the main config file.
 * Collects structural errors (missing/empty required fields, inverted min/max
 * ranges, bad time-window formats) and advisory warnings/infos for risky but
 * legal values. The result is invalid only when at least one 'error'-severity
 * issue was found.
 */
static validateConfig(config: Config): ValidationResult {
  const issues: ValidationIssue[] = []
  // Check baseURL
  if (!config.baseURL || !config.baseURL.startsWith('https://')) {
    issues.push({
      severity: 'error',
      field: 'baseURL',
      message: 'baseURL must be a valid HTTPS URL',
      suggestion: 'Use https://rewards.bing.com'
    })
  }
  // Check sessionPath
  if (!config.sessionPath || config.sessionPath.trim() === '') {
    issues.push({
      severity: 'error',
      field: 'sessionPath',
      message: 'sessionPath cannot be empty'
    })
  }
  // Check clusters
  if (config.clusters < 1) {
    issues.push({
      severity: 'error',
      field: 'clusters',
      message: 'clusters must be at least 1'
    })
  }
  if (config.clusters > 10) {
    issues.push({
      severity: 'warning',
      field: 'clusters',
      message: 'High cluster count may consume excessive resources',
      suggestion: 'Consider using 2-4 clusters for optimal performance'
    })
  }
  // Check globalTimeout (parseTimeout converts the configured value to milliseconds)
  const timeout = this.parseTimeout(config.globalTimeout)
  if (timeout < 10000) {
    issues.push({
      severity: 'warning',
      field: 'globalTimeout',
      message: 'Very short timeout may cause frequent failures',
      suggestion: 'Use at least 15s for stability'
    })
  }
  if (timeout > 120000) {
    issues.push({
      severity: 'warning',
      field: 'globalTimeout',
      message: 'Very long timeout may slow down execution',
      suggestion: 'Use 30-60s for optimal balance'
    })
  }
  // Check search settings
  if (config.searchSettings) {
    const searchDelay = config.searchSettings.searchDelay
    const minDelay = this.parseTimeout(searchDelay.min)
    const maxDelay = this.parseTimeout(searchDelay.max)
    if (minDelay >= maxDelay) {
      issues.push({
        severity: 'error',
        field: 'searchSettings.searchDelay',
        message: 'min delay must be less than max delay'
      })
    }
    if (minDelay < 10000) {
      issues.push({
        severity: 'warning',
        field: 'searchSettings.searchDelay.min',
        message: 'Very short search delays increase ban risk',
        suggestion: 'Use at least 30s between searches'
      })
    }
    if (config.searchSettings.retryMobileSearchAmount > 5) {
      issues.push({
        severity: 'warning',
        field: 'searchSettings.retryMobileSearchAmount',
        message: 'Too many retries may waste time',
        suggestion: 'Use 2-3 retries maximum'
      })
    }
  }
  // Check humanization
  if (config.humanization) {
    if (config.humanization.enabled === false && config.humanization.stopOnBan === true) {
      issues.push({
        severity: 'warning',
        field: 'humanization',
        message: 'stopOnBan is enabled but humanization is disabled',
        suggestion: 'Enable humanization for better ban protection'
      })
    }
    const actionDelay = config.humanization.actionDelay
    if (actionDelay) {
      const minAction = this.parseTimeout(actionDelay.min)
      const maxAction = this.parseTimeout(actionDelay.max)
      if (minAction >= maxAction) {
        issues.push({
          severity: 'error',
          field: 'humanization.actionDelay',
          message: 'min action delay must be less than max'
        })
      }
    }
    if (config.humanization.allowedWindows && config.humanization.allowedWindows.length > 0) {
      for (const window of config.humanization.allowedWindows) {
        // Each window must be an HH:mm-HH:mm range
        if (!/^\d{2}:\d{2}-\d{2}:\d{2}$/.test(window)) {
          issues.push({
            severity: 'error',
            field: 'humanization.allowedWindows',
            message: `Invalid time window format: ${window}`,
            suggestion: 'Use format HH:mm-HH:mm (e.g., 09:00-17:00)'
          })
        }
      }
    }
  }
  // Check proxy config
  if (config.proxy) {
    if (config.proxy.proxyGoogleTrends === false && config.proxy.proxyBingTerms === false) {
      issues.push({
        severity: 'info',
        field: 'proxy',
        message: 'All proxy options disabled - outbound requests will use direct connection'
      })
    }
  }
  // Check webhooks
  if (config.webhook?.enabled && (!config.webhook.url || config.webhook.url.trim() === '')) {
    issues.push({
      severity: 'error',
      field: 'webhook.url',
      message: 'Webhook enabled but URL is empty'
    })
  }
  if (config.conclusionWebhook?.enabled && (!config.conclusionWebhook.url || config.conclusionWebhook.url.trim() === '')) {
    issues.push({
      severity: 'error',
      field: 'conclusionWebhook.url',
      message: 'Conclusion webhook enabled but URL is empty'
    })
  }
  // Check ntfy
  if (config.ntfy?.enabled) {
    if (!config.ntfy.url || config.ntfy.url.trim() === '') {
      issues.push({
        severity: 'error',
        field: 'ntfy.url',
        message: 'NTFY enabled but URL is empty'
      })
    }
    if (!config.ntfy.topic || config.ntfy.topic.trim() === '') {
      issues.push({
        severity: 'error',
        field: 'ntfy.topic',
        message: 'NTFY enabled but topic is empty'
      })
    }
  }
  // Check schedule
  if (config.schedule?.enabled) {
    if (!config.schedule.timeZone) {
      issues.push({
        severity: 'warning',
        field: 'schedule.timeZone',
        message: 'No timeZone specified, defaulting to UTC',
        suggestion: 'Set your local timezone (e.g., America/New_York)'
      })
    }
    const useAmPm = config.schedule.useAmPm
    // time12/time24 are not declared on the Config type, so read them through
    // an index cast and narrow at runtime.
    const time12 = (config.schedule as unknown as Record<string, unknown>)['time12']
    const time24 = (config.schedule as unknown as Record<string, unknown>)['time24']
    if (useAmPm === true && (!time12 || (typeof time12 === 'string' && time12.trim() === ''))) {
      issues.push({
        severity: 'error',
        field: 'schedule.time12',
        message: 'useAmPm is true but time12 is empty'
      })
    }
    if (useAmPm === false && (!time24 || (typeof time24 === 'string' && time24.trim() === ''))) {
      issues.push({
        severity: 'error',
        field: 'schedule.time24',
        message: 'useAmPm is false but time24 is empty'
      })
    }
  }
  // Check workers
  if (config.workers) {
    const allDisabled = !config.workers.doDailySet &&
      !config.workers.doMorePromotions &&
      !config.workers.doPunchCards &&
      !config.workers.doDesktopSearch &&
      !config.workers.doMobileSearch &&
      !config.workers.doDailyCheckIn &&
      !config.workers.doReadToEarn
    if (allDisabled) {
      issues.push({
        severity: 'warning',
        field: 'workers',
        message: 'All workers are disabled - bot will not perform any tasks',
        suggestion: 'Enable at least one worker type'
      })
    }
  }
  // Check diagnostics
  if (config.diagnostics?.enabled) {
    const maxPerRun = config.diagnostics.maxPerRun || 2
    if (maxPerRun > 20) {
      issues.push({
        severity: 'warning',
        field: 'diagnostics.maxPerRun',
        message: 'Very high maxPerRun may fill disk quickly'
      })
    }
    const retention = config.diagnostics.retentionDays || 7
    if (retention > 90) {
      issues.push({
        severity: 'info',
        field: 'diagnostics.retentionDays',
        message: 'Long retention period - monitor disk usage'
      })
    }
  }
  // Invalid only when an 'error'-severity issue exists; warnings/infos don't block.
  const valid = !issues.some(i => i.severity === 'error')
  return { valid, issues }
}
/**
 * Validate accounts.json entries: email presence/uniqueness/format, password
 * sanity, proxy completeness and TOTP secret length. Also warns when one proxy
 * is shared by many accounts.
 *
 * Fix: duplicate detection now compares trimmed, lower-cased emails — email
 * addresses are case-insensitive, so "Foo@x.com" and "foo@x.com" are duplicates.
 *
 * @param accounts parsed contents of accounts.json
 * @returns aggregate result; `valid` is false when any error-severity issue exists
 */
static validateAccounts(accounts: Account[]): ValidationResult {
    const issues: ValidationIssue[] = []
    if (accounts.length === 0) {
        issues.push({
            severity: 'error',
            field: 'accounts',
            message: 'No accounts found in accounts.json'
        })
        return { valid: false, issues }
    }
    // Keys are normalized (trim + lowercase); original casing is kept for messages.
    const seenEmails = new Set<string>()
    const seenProxies = new Map<string, string[]>() // proxy -> [emails]
    for (let i = 0; i < accounts.length; i++) {
        const acc = accounts[i]
        const prefix = `accounts[${i}]`
        if (!acc) continue
        // Check email
        if (!acc.email || acc.email.trim() === '') {
            issues.push({
                severity: 'error',
                field: `${prefix}.email`,
                message: 'Account email is empty'
            })
        } else {
            const emailKey = acc.email.trim().toLowerCase()
            if (seenEmails.has(emailKey)) {
                issues.push({
                    severity: 'error',
                    field: `${prefix}.email`,
                    message: `Duplicate email: ${acc.email}`
                })
            }
            seenEmails.add(emailKey)
            if (!/@/.test(acc.email)) {
                issues.push({
                    severity: 'error',
                    field: `${prefix}.email`,
                    message: 'Invalid email format'
                })
            }
        }
        // Check password
        if (!acc.password || acc.password.trim() === '') {
            issues.push({
                severity: 'error',
                field: `${prefix}.password`,
                message: 'Account password is empty'
            })
        } else if (acc.password.length < 8) {
            issues.push({
                severity: 'warning',
                field: `${prefix}.password`,
                message: 'Very short password - verify it\'s correct'
            })
        }
        // Check proxy: a URL without a port is unusable
        if (acc.proxy) {
            const proxyUrl = acc.proxy.url
            if (proxyUrl && proxyUrl.trim() !== '') {
                if (!acc.proxy.port) {
                    issues.push({
                        severity: 'error',
                        field: `${prefix}.proxy.port`,
                        message: 'Proxy URL specified but port is missing'
                    })
                }
                // Track proxy reuse for the post-loop warning below
                const proxyKey = `${proxyUrl}:${acc.proxy.port}`
                if (!seenProxies.has(proxyKey)) {
                    seenProxies.set(proxyKey, [])
                }
                seenProxies.get(proxyKey)?.push(acc.email)
            }
        }
        // Check TOTP: secrets shorter than 16 chars are suspicious
        if (acc.totp && acc.totp.trim() !== '') {
            if (acc.totp.length < 16) {
                issues.push({
                    severity: 'warning',
                    field: `${prefix}.totp`,
                    message: 'TOTP secret seems too short - verify it\'s correct'
                })
            }
        }
    }
    // Warn about excessive proxy reuse
    for (const [proxyKey, emails] of seenProxies) {
        if (emails.length > 3) {
            issues.push({
                severity: 'warning',
                field: 'accounts.proxy',
                message: `Proxy ${proxyKey} used by ${emails.length} accounts - may trigger rate limits`,
                suggestion: 'Use different proxies per account for better safety'
            })
        }
    }
    const valid = !issues.some(i => i.severity === 'error')
    return { valid, issues }
}
/**
 * Run config and accounts validation together and add cross-cutting checks
 * that need both inputs at once.
 */
static validateAll(config: Config, accounts: Account[]): ValidationResult {
    // Merge the two independent validations first.
    const issues: ValidationIssue[] = [
        ...this.validateConfig(config).issues,
        ...this.validateAccounts(accounts).issues
    ]
    // Cross-validation: more clusters than accounts wastes workers.
    if (accounts.length > 0 && config.clusters > accounts.length) {
        issues.push({
            severity: 'info',
            field: 'clusters',
            message: `${config.clusters} clusters configured but only ${accounts.length} account(s)`,
            suggestion: 'Reduce clusters to match account count for efficiency'
        })
    }
    // Cross-validation: parallel execution needs at least two accounts to matter.
    if (config.parallel && accounts.length === 1) {
        issues.push({
            severity: 'info',
            field: 'parallel',
            message: 'Parallel mode enabled with single account has no effect',
            suggestion: 'Disable parallel mode or add more accounts'
        })
    }
    return { valid: issues.every(i => i.severity !== 'error'), issues }
}
/**
 * Load config + accounts from disk and run the combined validation.
 * Parse/IO failures are reported as an error-severity issue, never thrown.
 *
 * Fix: JSONC comments are now removed with a string-aware scanner. The previous
 * regex (/\/\*[\s\S]*?\*\/|\/\/.*\/g) also deleted "//" inside string values —
 * e.g. "https://rewards.bing.com" became "https:" — corrupting valid JSON.
 * The scanner is also applied to the accounts file, matching the JSONC support
 * the loader offers for it.
 */
static validateFromFiles(configPath: string, accountsPath: string): ValidationResult {
    try {
        if (!fs.existsSync(configPath)) {
            return {
                valid: false,
                issues: [{
                    severity: 'error',
                    field: 'config',
                    message: `Config file not found: ${configPath}`
                }]
            }
        }
        if (!fs.existsSync(accountsPath)) {
            return {
                valid: false,
                issues: [{
                    severity: 'error',
                    field: 'accounts',
                    message: `Accounts file not found: ${accountsPath}`
                }]
            }
        }
        const configRaw = fs.readFileSync(configPath, 'utf-8')
        const accountsRaw = fs.readFileSync(accountsPath, 'utf-8')
        const config: Config = JSON.parse(this.stripCommentsSafely(configRaw))
        const accounts: Account[] = JSON.parse(this.stripCommentsSafely(accountsRaw))
        return this.validateAll(config, accounts)
    } catch (error) {
        return {
            valid: false,
            issues: [{
                severity: 'error',
                field: 'parse',
                message: `Failed to parse files: ${error instanceof Error ? error.message : String(error)}`
            }]
        }
    }
}

/**
 * Remove // line and /* block *\/ comments without touching comment-like
 * sequences inside double-quoted JSON strings (handles \" escapes).
 */
private static stripCommentsSafely(input: string): string {
    let out = ''
    let i = 0
    let inString = false
    while (i < input.length) {
        const ch = input[i] as string
        if (inString) {
            out += ch
            if (ch === '\\' && i + 1 < input.length) {
                // Copy the escaped character verbatim (covers \")
                out += input[i + 1]
                i += 2
                continue
            }
            if (ch === '"') inString = false
            i++
            continue
        }
        if (ch === '"') {
            inString = true
            out += ch
            i++
            continue
        }
        if (ch === '/' && input[i + 1] === '/') {
            // Skip to end of line; keep the newline itself
            while (i < input.length && input[i] !== '\n' && input[i] !== '\r') i++
            continue
        }
        if (ch === '/' && input[i + 1] === '*') {
            i += 2
            while (i < input.length && !(input[i] === '*' && input[i + 1] === '/')) i++
            i += 2
            continue
        }
        out += ch
        i++
    }
    return out
}
/**
 * Print validation results to console with color
 * Note: This method intentionally uses console.log for CLI output formatting
 *
 * Output format: a pass/fail headline, then issues grouped by severity
 * (errors, warnings, infos), each with an optional indented suggestion line.
 */
static printResults(result: ValidationResult): void {
    // `valid` is computed by the validators as "no error-severity issues"
    if (result.valid) {
        console.log('✅ Configuration validation passed\n')
    } else {
        console.log('❌ Configuration validation failed\n')
    }
    if (result.issues.length === 0) {
        console.log('No issues found.')
        return
    }
    // Group by severity so the most serious problems print first
    const errors = result.issues.filter(i => i.severity === 'error')
    const warnings = result.issues.filter(i => i.severity === 'warning')
    const infos = result.issues.filter(i => i.severity === 'info')
    if (errors.length > 0) {
        console.log(`\n🚫 ERRORS (${errors.length}):`)
        for (const issue of errors) {
            console.log(`  ${issue.field}: ${issue.message}`)
            if (issue.suggestion) {
                console.log(`    → ${issue.suggestion}`)
            }
        }
    }
    if (warnings.length > 0) {
        console.log(`\n⚠ WARNINGS (${warnings.length}):`)
        for (const issue of warnings) {
            console.log(`  ${issue.field}: ${issue.message}`)
            if (issue.suggestion) {
                console.log(`    → ${issue.suggestion}`)
            }
        }
    }
    if (infos.length > 0) {
        console.log(`\n INFO (${infos.length}):`)
        for (const issue of infos) {
            console.log(`  ${issue.field}: ${issue.message}`)
            if (issue.suggestion) {
                console.log(`    → ${issue.suggestion}`)
            }
        }
    }
    // Trailing blank line separates the report from subsequent output
    console.log()
}
/**
 * Convert a timeout expressed as a number (already milliseconds) or a suffixed
 * string ("500ms", "30s", "2min") into milliseconds.
 *
 * Fixes vs. the previous version:
 * - fractional values are honored ("1.5min" -> 90000; parseInt truncated to 60000)
 * - the fallback no longer clobbers a legitimate 0 ("0" used to become 30000
 *   because of `|| 30000`); only non-numeric input falls back to 30s
 */
private static parseTimeout(value: number | string): number {
    if (typeof value === 'number') return value
    const str = String(value).trim().toLowerCase()
    // Check longer suffixes before the bare "s" suffix
    let multiplier = 1
    if (str.endsWith('ms')) multiplier = 1
    else if (str.endsWith('min')) multiplier = 60000
    else if (str.endsWith('s')) multiplier = 1000
    const parsed = parseFloat(str)
    return Number.isNaN(parsed) ? 30000 : parsed * multiplier
}
}

74
src/util/Diagnostics.ts Normal file
View File

@@ -0,0 +1,74 @@
import path from 'path'
import fs from 'fs'
import type { Page } from 'rebrowser-playwright'
import type { MicrosoftRewardsBot } from '../index'
/** Where a capture is filed: routine debugging ('default') or a security incident ('security'). */
export type DiagnosticsScope = 'default' | 'security'

/** Optional tweaks for captureDiagnostics. */
export interface DiagnosticsOptions {
    // Defaults to 'default'; 'security' forces capture and uses a dedicated incident folder.
    scope?: DiagnosticsScope
    // When true, do not consume one of the per-run diagnostic slots (default scope only).
    skipSlot?: boolean
    // When true, capture even if diagnostics are disabled in config.
    force?: boolean
}
/**
 * Capture a screenshot and/or the page HTML for debugging.
 * Routine ('default') captures honor the diagnostics config and a per-run slot
 * limit; 'security' captures are always taken and stored in their own
 * timestamped incident folder. Never throws — failures are logged as warnings.
 */
export async function captureDiagnostics(bot: MicrosoftRewardsBot, page: Page, rawLabel: string, options?: DiagnosticsOptions): Promise<void> {
    try {
        const scope: DiagnosticsScope = options?.scope ?? 'default'
        const cfg = bot.config?.diagnostics ?? {}
        const isSecurity = scope === 'security'

        // Security incidents bypass the enabled flag; so does options.force.
        const forced = isSecurity || options?.force === true
        if (!forced && cfg.enabled === false) return

        // Routine captures are rate-limited per run via the bot's slot counter.
        if (!isSecurity) {
            const limit = typeof cfg.maxPerRun === 'number' ? cfg.maxPerRun : 8
            if (!options?.skipSlot && !bot.tryReserveDiagSlot(limit)) return
        }

        const wantShot = isSecurity ? true : cfg.saveScreenshot !== false
        const wantHtml = isSecurity ? true : cfg.saveHtml !== false
        if (!wantShot && !wantHtml) return

        const safeLabel = rawLabel.replace(/[^a-z0-9-_]/gi, '_').slice(0, 64) || 'capture'
        const now = new Date()
        const pad = (v: number) => String(v).padStart(2, '0')
        const timestamp = pad(now.getHours()) + pad(now.getMinutes()) + pad(now.getSeconds())

        // Security incidents get a dedicated folder; routine captures share a per-day dir.
        let dir: string
        if (isSecurity) {
            const base = path.join(process.cwd(), 'diagnostics', 'security-incidents')
            fs.mkdirSync(base, { recursive: true })
            dir = path.join(base, `${now.toISOString().replace(/[:.]/g, '-')}-${safeLabel}`)
            fs.mkdirSync(dir, { recursive: true })
        } else {
            const day = `${now.getFullYear()}-${pad(now.getMonth() + 1)}-${pad(now.getDate())}`
            dir = path.join(process.cwd(), 'reports', day)
            fs.mkdirSync(dir, { recursive: true })
        }

        if (wantShot) {
            const shotPath = path.join(dir, isSecurity ? 'page.png' : `${timestamp}_${safeLabel}.png`)
            await page.screenshot({ path: shotPath }).catch(() => {})
            bot.log(bot.isMobile, 'DIAG', isSecurity
                ? `Saved security screenshot to ${shotPath}`
                : `Saved diagnostics screenshot to ${shotPath}`)
        }

        if (wantHtml) {
            const htmlPath = path.join(dir, isSecurity ? 'page.html' : `${timestamp}_${safeLabel}.html`)
            try {
                await fs.promises.writeFile(htmlPath, await page.content(), 'utf-8')
                if (isSecurity) bot.log(bot.isMobile, 'DIAG', `Saved security HTML to ${htmlPath}`)
            } catch {
                /* ignore */
            }
        }
    } catch (error) {
        bot.log(bot.isMobile, 'DIAG', `Failed to capture diagnostics: ${error instanceof Error ? error.message : error}`, 'warn')
    }
}

54
src/util/Humanizer.ts Normal file
View File

@@ -0,0 +1,54 @@
import { Page } from 'rebrowser-playwright'
import Util from './Utils'
import type { ConfigHumanization } from '../interface/Config'
/**
 * Adds small human-like behavior between bot actions: occasional tiny mouse
 * movements / scrolls, and randomized pauses. All behavior can be switched off
 * via the humanization config ({ enabled: false }).
 */
export class Humanizer {
    constructor(
        private readonly utils: Util,
        private readonly settings?: ConfigHumanization
    ) {}

    /** Randomly perform a tiny mouse move and/or scroll on the page. Never throws. */
    async microGestures(page: Page): Promise<void> {
        if (this.settings && this.settings.enabled === false) return
        const moveChance = this.settings?.gestureMoveProb ?? 0.4
        const scrollChance = this.settings?.gestureScrollProb ?? 0.2
        try {
            if (Math.random() < moveChance) {
                // Small move near the top-left corner (x: 5-44, y: 5-34)
                const targetX = Math.floor(Math.random() * 40) + 5
                const targetY = Math.floor(Math.random() * 30) + 5
                await page.mouse.move(targetX, targetY, { steps: 2 }).catch(() => {})
            }
            if (Math.random() < scrollChance) {
                // Scroll 50-199px in a random direction
                const direction = Math.random() < 0.5 ? 1 : -1
                const distance = Math.floor(Math.random() * 150) + 50
                await page.mouse.wheel(0, direction * distance).catch(() => {})
            }
        } catch { /* noop */ }
    }

    /** Wait a randomized, configurable delay between actions (default 150-450ms). */
    async actionPause(): Promise<void> {
        if (this.settings && this.settings.enabled === false) return
        const defMin = 150
        const defMax = 450
        let low = defMin
        let high = defMax
        const delay = this.settings?.actionDelay
        if (delay) {
            const toMs = (value: number | string): number => {
                if (typeof value === 'number') return value
                try {
                    // Parsed string values are clamped to [0, 10s]
                    return Math.max(0, Math.min(this.utils.stringToMs(String(value)), 10_000))
                } catch {
                    return defMin
                }
            }
            low = toMs(delay.min)
            high = toMs(delay.max)
            if (low > high) [low, high] = [high, low]
            // Hard cap: never pause longer than 5s
            high = Math.min(high, 5_000)
        }
        await this.utils.wait(this.utils.randomNumber(low, high))
    }
}

export default Humanizer

104
src/util/JobState.ts Normal file
View File

@@ -0,0 +1,104 @@
import fs from 'fs'
import path from 'path'
import type { Config } from '../interface/Config'
// Metadata stored when a whole account is marked complete for a day.
type AccountCompletionMeta = {
    runId?: string
    completedAt: string // ISO-8601 timestamp set at mark time
    totalCollected?: number
    banned?: boolean // defaults to false when not supplied
    errors?: number // defaults to 0 when not supplied
}

// Progress for a single day key: which offers were done, plus optional
// whole-account completion flag/metadata.
type DayState = {
    doneOfferIds: string[]
    accountCompleted?: boolean
    accountMeta?: AccountCompletionMeta
}

// On-disk shape of one account's state file: day key -> that day's state.
type FileState = {
    days: Record<string, DayState>
}
/**
 * Per-account, per-day job progress persisted as one JSON file per account.
 * Tracks which offer IDs are already done and whether the whole account
 * finished its day, so re-runs can skip completed work.
 */
export class JobState {
    private readonly baseDir: string

    constructor(cfg: Config) {
        // Configured directory wins; otherwise <cwd>/<sessionPath>/job-state
        this.baseDir = cfg.jobState?.dir || path.join(process.cwd(), cfg.sessionPath, 'job-state')
        if (!fs.existsSync(this.baseDir)) fs.mkdirSync(this.baseDir, { recursive: true })
    }

    /** Map an email to its state file, replacing filesystem-unsafe characters. */
    private fileFor(email: string): string {
        return path.join(this.baseDir, `${email.replace(/[^a-z0-9._-]/gi, '_')}.json`)
    }

    /** Read the account's state; missing or corrupt files yield a fresh state. */
    private load(email: string): FileState {
        const file = this.fileFor(email)
        if (!fs.existsSync(file)) return { days: {} }
        try {
            const parsed = JSON.parse(fs.readFileSync(file, 'utf-8'))
            if (parsed && typeof parsed === 'object' && parsed.days) return parsed as FileState
        } catch { /* corrupt file -> start over */ }
        return { days: {} }
    }

    private save(email: string, state: FileState): void {
        fs.writeFileSync(this.fileFor(email), JSON.stringify(state, null, 2), 'utf-8')
    }

    /** Has this offer already been completed for the given day? */
    isDone(email: string, day: string, offerId: string): boolean {
        return this.load(email).days[day]?.doneOfferIds.includes(offerId) ?? false
    }

    /** Record an offer as completed (idempotent). */
    markDone(email: string, day: string, offerId: string): void {
        const state = this.load(email)
        const entry = state.days[day] ?? (state.days[day] = { doneOfferIds: [] })
        if (!entry.doneOfferIds.includes(offerId)) entry.doneOfferIds.push(offerId)
        this.save(email, state)
    }

    /** Has the whole account been marked complete for the given day? */
    isAccountComplete(email: string, day: string): boolean {
        return this.load(email).days[day]?.accountCompleted === true
    }

    /** Mark the whole account complete for the day, with optional run metadata. */
    markAccountComplete(
        email: string,
        day: string,
        meta?: { runId?: string; totalCollected?: number; banned?: boolean; errors?: number }
    ): void {
        const state = this.load(email)
        const entry = state.days[day] ?? (state.days[day] = { doneOfferIds: [] })
        entry.accountCompleted = true
        entry.accountMeta = {
            completedAt: new Date().toISOString(),
            runId: meta?.runId,
            totalCollected: meta?.totalCollected,
            banned: meta?.banned ?? false,
            errors: meta?.errors ?? 0
        }
        this.save(email, state)
    }

    /** Undo a completion mark (no-op when nothing was marked). */
    clearAccountComplete(email: string, day: string): void {
        const state = this.load(email)
        const entry = state.days[day]
        if (!entry) return
        if (!entry.accountCompleted && !entry.accountMeta) return
        delete entry.accountCompleted
        delete entry.accountMeta
        this.save(email, state)
    }
}

export default JobState

436
src/util/Load.ts Normal file
View File

@@ -0,0 +1,436 @@
import { BrowserContext, Cookie } from 'rebrowser-playwright'
import { BrowserFingerprintWithHeaders } from 'fingerprint-generator'
import fs from 'fs'
import path from 'path'
import { Account } from '../interface/Account'
import { Config, ConfigSaveFingerprint } from '../interface/Config'
// Process-wide cache of the parsed config: loadConfig() parses the file once
// and hands back this same object on every later call.
let configCache: Config
// Absolute path of the config file actually loaded; exposed via getConfigPath().
let configSourcePath = ''
/**
 * Remove // line and /* block *\/ comments from JSONC text while preserving
 * comment-like sequences inside single- or double-quoted strings (including
 * escaped quotes). Newlines that end a line comment are kept; everything inside
 * a block comment — newlines included — is dropped.
 */
function stripJsonComments(input: string): string {
    type Mode = 'code' | 'string' | 'line' | 'block'
    let mode: Mode = 'code'
    let quote = ''
    const out: string[] = []
    let i = 0
    while (i < input.length) {
        const ch = input[i]!
        const pair = ch + (input[i + 1] ?? '')
        switch (mode) {
            case 'line':
                // Drop until end of line; the newline itself is emitted
                if (ch === '\n' || ch === '\r') {
                    mode = 'code'
                    out.push(ch)
                }
                i++
                break
            case 'block':
                if (pair === '*/') {
                    mode = 'code'
                    i += 2
                } else {
                    i++
                }
                break
            case 'string':
                out.push(ch)
                if (ch === '\\') {
                    // Copy the escaped character verbatim (covers \" and \')
                    if (i + 1 < input.length) out.push(input[i + 1]!)
                    i += 2
                    break
                }
                if (ch === quote) mode = 'code'
                i++
                break
            default:
                if (ch === '"' || ch === '\'') {
                    mode = 'string'
                    quote = ch
                    out.push(ch)
                    i++
                } else if (pair === '//') {
                    mode = 'line'
                    i += 2
                } else if (pair === '/*') {
                    mode = 'block'
                    i += 2
                } else {
                    out.push(ch)
                    i++
                }
        }
    }
    return out.join('')
}
/**
 * Normalize both legacy (flat) and new (nested) config schemas into the flat
 * Config interface. Every value gets a default when absent, so downstream code
 * can rely on the returned object's shape. NOTE(review): this mutates `n`
 * (i.e. the caller's raw object) when filling humanization/vacation defaults.
 */
function normalizeConfig(raw: unknown): Config {
    // Using any here is necessary to support both legacy flat config and new nested config structures
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const n = (raw || {}) as any
    // Browser / execution — nested keys win over legacy flat keys
    const headless = n.browser?.headless ?? n.headless ?? false
    const globalTimeout = n.browser?.globalTimeout ?? n.globalTimeout ?? '30s'
    const parallel = n.execution?.parallel ?? n.parallel ?? false
    const runOnZeroPoints = n.execution?.runOnZeroPoints ?? n.runOnZeroPoints ?? false
    const clusters = n.execution?.clusters ?? n.clusters ?? 1
    const passesPerRun = n.execution?.passesPerRun ?? n.passesPerRun
    // Search
    const useLocalQueries = n.search?.useLocalQueries ?? n.searchOnBingLocalQueries ?? false
    const searchSettingsSrc = n.search?.settings ?? n.searchSettings ?? {}
    const delaySrc = searchSettingsSrc.delay ?? searchSettingsSrc.searchDelay ?? { min: '3min', max: '5min' }
    const searchSettings = {
        useGeoLocaleQueries: !!(searchSettingsSrc.useGeoLocaleQueries ?? false),
        scrollRandomResults: !!(searchSettingsSrc.scrollRandomResults ?? false),
        clickRandomResults: !!(searchSettingsSrc.clickRandomResults ?? false),
        retryMobileSearchAmount: Number(searchSettingsSrc.retryMobileSearchAmount ?? 2),
        searchDelay: {
            min: delaySrc.min ?? '3min',
            max: delaySrc.max ?? '5min'
        },
        localFallbackCount: Number(searchSettingsSrc.localFallbackCount ?? 25),
        extraFallbackRetries: Number(searchSettingsSrc.extraFallbackRetries ?? 1)
    }
    // Workers — all task types enabled by default
    const workers = n.workers ?? {
        doDailySet: true,
        doMorePromotions: true,
        doPunchCards: true,
        doDesktopSearch: true,
        doMobileSearch: true,
        doDailyCheckIn: true,
        doReadToEarn: true,
        bundleDailySetWithSearch: false
    }
    // Ensure missing flag gets a default
    if (typeof workers.bundleDailySetWithSearch !== 'boolean') workers.bundleDailySetWithSearch = false
    // Logging — nested arrays win over legacy flat keys
    const logging = n.logging ?? {}
    const logExcludeFunc = Array.isArray(logging.excludeFunc) ? logging.excludeFunc : (n.logExcludeFunc ?? [])
    const webhookLogExcludeFunc = Array.isArray(logging.webhookExcludeFunc) ? logging.webhookExcludeFunc : (n.webhookLogExcludeFunc ?? [])
    // Notifications
    const notifications = n.notifications ?? {}
    const webhook = notifications.webhook ?? n.webhook ?? { enabled: false, url: '' }
    const conclusionWebhook = notifications.conclusionWebhook ?? n.conclusionWebhook ?? { enabled: false, url: '' }
    const ntfy = notifications.ntfy ?? n.ntfy ?? { enabled: false, url: '', topic: '', authToken: '' }
    // Buy Mode
    const buyMode = n.buyMode ?? {}
    const buyModeEnabled = typeof buyMode.enabled === 'boolean' ? buyMode.enabled : false
    const buyModeMax = typeof buyMode.maxMinutes === 'number' ? buyMode.maxMinutes : 45
    // Fingerprinting
    const saveFingerprint = (n.fingerprinting?.saveFingerprint ?? n.saveFingerprint) ?? { mobile: false, desktop: false }
    // Humanization defaults (single on/off) — mutates n.humanization in place
    if (!n.humanization) n.humanization = {}
    if (typeof n.humanization.enabled !== 'boolean') n.humanization.enabled = true
    if (typeof n.humanization.stopOnBan !== 'boolean') n.humanization.stopOnBan = false
    if (typeof n.humanization.immediateBanAlert !== 'boolean') n.humanization.immediateBanAlert = true
    if (typeof n.humanization.randomOffDaysPerWeek !== 'number') {
        n.humanization.randomOffDaysPerWeek = 1
    }
    // Strong default gestures when enabled (explicit values still win)
    if (typeof n.humanization.gestureMoveProb !== 'number') {
        n.humanization.gestureMoveProb = !n.humanization.enabled ? 0 : 0.5
    }
    if (typeof n.humanization.gestureScrollProb !== 'number') {
        n.humanization.gestureScrollProb = !n.humanization.enabled ? 0 : 0.25
    }
    // Vacation mode (monthly contiguous off-days); min/max are floored and swapped if inverted
    if (!n.vacation) n.vacation = {}
    if (typeof n.vacation.enabled !== 'boolean') n.vacation.enabled = false
    const vMin = Number(n.vacation.minDays)
    const vMax = Number(n.vacation.maxDays)
    n.vacation.minDays = isFinite(vMin) && vMin > 0 ? Math.floor(vMin) : 3
    n.vacation.maxDays = isFinite(vMax) && vMax > 0 ? Math.floor(vMax) : 5
    if (n.vacation.maxDays < n.vacation.minDays) {
        const t = n.vacation.minDays; n.vacation.minDays = n.vacation.maxDays; n.vacation.maxDays = t
    }
    // Optional sections below stay undefined when the user supplied nothing,
    // so consumers can distinguish "not configured" from "configured off".
    const riskRaw = (n.riskManagement ?? {}) as Record<string, unknown>
    const hasRiskCfg = Object.keys(riskRaw).length > 0
    const riskManagement = hasRiskCfg ? {
        enabled: riskRaw.enabled === true,
        autoAdjustDelays: riskRaw.autoAdjustDelays !== false,
        stopOnCritical: riskRaw.stopOnCritical === true,
        banPrediction: riskRaw.banPrediction === true,
        riskThreshold: typeof riskRaw.riskThreshold === 'number' ? riskRaw.riskThreshold : undefined
    } : undefined
    const analyticsRaw = (n.analytics ?? {}) as Record<string, unknown>
    const hasAnalyticsCfg = Object.keys(analyticsRaw).length > 0
    const analytics = hasAnalyticsCfg ? {
        enabled: analyticsRaw.enabled === true,
        retentionDays: typeof analyticsRaw.retentionDays === 'number' ? analyticsRaw.retentionDays : undefined,
        exportMarkdown: analyticsRaw.exportMarkdown === true,
        webhookSummary: analyticsRaw.webhookSummary === true
    } : undefined
    const queryDiversityRaw = (n.queryDiversity ?? {}) as Record<string, unknown>
    const hasQueryCfg = Object.keys(queryDiversityRaw).length > 0
    const queryDiversity = hasQueryCfg ? {
        enabled: queryDiversityRaw.enabled === true,
        sources: Array.isArray(queryDiversityRaw.sources) && queryDiversityRaw.sources.length
            ? (queryDiversityRaw.sources.filter((s: unknown) => typeof s === 'string') as Array<'google-trends' | 'reddit' | 'news' | 'wikipedia' | 'local-fallback'>)
            : undefined,
        maxQueriesPerSource: typeof queryDiversityRaw.maxQueriesPerSource === 'number' ? queryDiversityRaw.maxQueriesPerSource : undefined,
        cacheMinutes: typeof queryDiversityRaw.cacheMinutes === 'number' ? queryDiversityRaw.cacheMinutes : undefined
    } : undefined
    const dryRun = n.dryRun === true
    // Job-state persistence defaults to on
    const jobStateRaw = (n.jobState ?? {}) as Record<string, unknown>
    const jobState = {
        enabled: jobStateRaw.enabled !== false,
        dir: typeof jobStateRaw.dir === 'string' ? jobStateRaw.dir : undefined,
        skipCompletedAccounts: jobStateRaw.skipCompletedAccounts !== false
    }
    // Assemble the flat Config consumed by the rest of the app
    const cfg: Config = {
        baseURL: n.baseURL ?? 'https://rewards.bing.com',
        sessionPath: n.sessionPath ?? 'sessions',
        headless,
        parallel,
        runOnZeroPoints,
        clusters,
        saveFingerprint,
        workers,
        searchOnBingLocalQueries: !!useLocalQueries,
        globalTimeout,
        searchSettings,
        humanization: n.humanization,
        retryPolicy: n.retryPolicy,
        jobState,
        logExcludeFunc,
        webhookLogExcludeFunc,
        logging, // retain full logging object for live webhook usage
        proxy: n.proxy ?? { proxyGoogleTrends: true, proxyBingTerms: true },
        webhook,
        conclusionWebhook,
        ntfy,
        diagnostics: n.diagnostics,
        update: n.update,
        schedule: n.schedule,
        passesPerRun: passesPerRun,
        vacation: n.vacation,
        buyMode: { enabled: buyModeEnabled, maxMinutes: buyModeMax },
        crashRecovery: n.crashRecovery || {},
        riskManagement,
        analytics,
        dryRun,
        queryDiversity
    }
    return cfg
}
/**
 * Load accounts from, in priority order:
 *  1. accounts.dev.json when started with -dev
 *  2. ACCOUNTS_JSON env var (inline JSON array, Docker-friendly)
 *  3. ACCOUNTS_FILE env var (path, absolute or relative to cwd)
 *  4. accounts.json / accounts.jsonc searched across root, src, cwd and dist
 * JSONC comments are supported; entries are minimally validated and accounts
 * with `enabled: false` are filtered out.
 *
 * Fix: errors are re-thrown as-is instead of `new Error(error as string)`,
 * which stringified the Error into "Error: …" and discarded the stack trace.
 *
 * @throws Error when no file is found or an entry is malformed
 */
export function loadAccounts(): Account[] {
    try {
        // 1) CLI dev override
        let file = 'accounts.json'
        if (process.argv.includes('-dev')) {
            file = 'accounts.dev.json'
        }
        // 2) Docker-friendly env overrides
        const envJson = process.env.ACCOUNTS_JSON
        const envFile = process.env.ACCOUNTS_FILE
        let raw: string | undefined
        if (envJson && envJson.trim().startsWith('[')) {
            raw = envJson
        } else if (envFile && envFile.trim()) {
            const full = path.isAbsolute(envFile) ? envFile : path.join(process.cwd(), envFile)
            if (!fs.existsSync(full)) {
                throw new Error(`ACCOUNTS_FILE not found: ${full}`)
            }
            raw = fs.readFileSync(full, 'utf-8')
        } else {
            // Try multiple locations to support both root mounts and dist mounts
            // Support both .json and .jsonc extensions
            const candidates = [
                path.join(__dirname, '../', file), // root/accounts.json (preferred)
                path.join(__dirname, '../', file + 'c'), // root/accounts.jsonc
                path.join(__dirname, '../src', file), // fallback: file kept inside src/
                path.join(__dirname, '../src', file + 'c'), // src/accounts.jsonc
                path.join(process.cwd(), file), // cwd override
                path.join(process.cwd(), file + 'c'), // cwd/accounts.jsonc
                path.join(process.cwd(), 'src', file), // cwd/src/accounts.json
                path.join(process.cwd(), 'src', file + 'c'), // cwd/src/accounts.jsonc
                path.join(__dirname, file), // dist/accounts.json (legacy)
                path.join(__dirname, file + 'c') // dist/accounts.jsonc
            ]
            let chosen: string | null = null
            for (const p of candidates) {
                try { if (fs.existsSync(p)) { chosen = p; break } } catch { /* ignore */ }
            }
            if (!chosen) throw new Error(`accounts file not found in: ${candidates.join(' | ')}`)
            raw = fs.readFileSync(chosen, 'utf-8')
        }
        // Support comments in accounts file (same as config)
        const cleaned = stripJsonComments(raw)
        const parsedUnknown = JSON.parse(cleaned)
        // Accept either a root array or an object with an `accounts` array, ignore `_note`
        const parsed = Array.isArray(parsedUnknown) ? parsedUnknown : (parsedUnknown && typeof parsedUnknown === 'object' && Array.isArray((parsedUnknown as { accounts?: unknown }).accounts) ? (parsedUnknown as { accounts: unknown[] }).accounts : null)
        if (!Array.isArray(parsed)) throw new Error('accounts must be an array')
        // Minimal shape validation: email/password required, recoveryEmail must look like an email
        for (const entry of parsed) {
            // eslint-disable-next-line @typescript-eslint/no-explicit-any
            const a = entry as any
            if (!a || typeof a.email !== 'string' || typeof a.password !== 'string') {
                throw new Error('each account must have email and password strings')
            }
            a.email = String(a.email).trim()
            a.password = String(a.password)
            if (typeof a.recoveryEmail !== 'string') {
                throw new Error(`account ${a.email || '<unknown>'} must include a recoveryEmail string`)
            }
            a.recoveryEmail = String(a.recoveryEmail).trim()
            if (!a.recoveryEmail || !/@/.test(a.recoveryEmail)) {
                throw new Error(`account ${a.email} recoveryEmail must be a valid email address`)
            }
        }
        // Filter out disabled accounts (enabled: false)
        const allAccounts = parsed as Account[]
        return allAccounts.filter(acc => acc.enabled !== false)
    } catch (error) {
        // Preserve the original Error (message + stack) instead of stringifying it
        if (error instanceof Error) throw error
        throw new Error(String(error))
    }
}
/** Absolute path of the config file chosen by loadConfig(); '' before the first load. */
export function getConfigPath(): string { return configSourcePath }
/**
 * Load, parse and normalize the configuration (config.jsonc / config.json),
 * searching dist root, src, cwd, cwd/src and dist/util in that order.
 * The result is cached for the process lifetime; later calls are free.
 *
 * Fix: errors are re-thrown as-is instead of `new Error(error as string)`,
 * which garbled the message ("Error: Error: …") and discarded the stack.
 *
 * @throws Error when no config file is found or it cannot be parsed
 */
export function loadConfig(): Config {
    try {
        if (configCache) {
            return configCache
        }
        // Resolve configuration file from common locations (supports .jsonc and .json)
        const names = ['config.jsonc', 'config.json']
        const bases = [
            path.join(__dirname, '../'), // dist root when compiled
            path.join(__dirname, '../src'), // fallback: running dist but config still in src
            process.cwd(), // repo root
            path.join(process.cwd(), 'src'), // repo/src when running ts-node
            __dirname // dist/util
        ]
        const candidates: string[] = []
        for (const base of bases) {
            for (const name of names) {
                candidates.push(path.join(base, name))
            }
        }
        let cfgPath: string | null = null
        for (const p of candidates) {
            try { if (fs.existsSync(p)) { cfgPath = p; break } } catch { /* ignore */ }
        }
        if (!cfgPath) throw new Error(`config.json not found in: ${candidates.join(' | ')}`)
        const config = fs.readFileSync(cfgPath, 'utf-8')
        const text = config.replace(/^\uFEFF/, '') // strip a UTF-8 BOM if present
        const raw = JSON.parse(stripJsonComments(text))
        const normalized = normalizeConfig(raw)
        configCache = normalized // Set as cache
        configSourcePath = cfgPath
        return normalized
    } catch (error) {
        // Preserve the original Error (message + stack) instead of stringifying it
        if (error instanceof Error) throw error
        throw new Error(String(error))
    }
}
/**
 * Load persisted cookies and (optionally) the saved browser fingerprint for an
 * account/platform. Missing files are not an error: the result has empty
 * cookies and an undefined fingerprint. The fingerprint is only read when
 * persistence is enabled for the platform in `saveFingerprint`.
 *
 * Fix: errors are re-thrown as-is instead of `new Error(error as string)`,
 * which garbled the message and discarded the stack.
 */
export async function loadSessionData(sessionPath: string, email: string, isMobile: boolean, saveFingerprint: ConfigSaveFingerprint) {
    try {
        // Cookies are stored per platform under <dist>/browser/<sessionPath>/<email>/
        const cookieFile = path.join(__dirname, '../browser/', sessionPath, email, `${isMobile ? 'mobile_cookies' : 'desktop_cookies'}.json`)
        let cookies: Cookie[] = []
        if (fs.existsSync(cookieFile)) {
            const cookiesData = await fs.promises.readFile(cookieFile, 'utf-8')
            cookies = JSON.parse(cookiesData)
        }
        // Fetch fingerprint file (support both legacy typo "fingerpint" and corrected "fingerprint")
        const baseDir = path.join(__dirname, '../browser/', sessionPath, email)
        const legacyFile = path.join(baseDir, `${isMobile ? 'mobile_fingerpint' : 'desktop_fingerpint'}.json`)
        const correctFile = path.join(baseDir, `${isMobile ? 'mobile_fingerprint' : 'desktop_fingerprint'}.json`)
        let fingerprint!: BrowserFingerprintWithHeaders
        const shouldLoad = (saveFingerprint.desktop && !isMobile) || (saveFingerprint.mobile && isMobile)
        if (shouldLoad) {
            // Prefer the corrected filename, fall back to the legacy one
            const chosen = fs.existsSync(correctFile) ? correctFile : (fs.existsSync(legacyFile) ? legacyFile : '')
            if (chosen) {
                const fingerprintData = await fs.promises.readFile(chosen, 'utf-8')
                fingerprint = JSON.parse(fingerprintData)
            }
        }
        return {
            cookies: cookies,
            fingerprint: fingerprint
        }
    } catch (error) {
        // Preserve the original Error (message + stack) instead of stringifying it
        if (error instanceof Error) throw error
        throw new Error(String(error))
    }
}
/**
 * Persist the browser context's cookies for an account/platform under
 * <dist>/browser/<sessionPath>/<email>/, creating the directory if needed.
 *
 * Fix: errors are re-thrown as-is instead of `new Error(error as string)`,
 * which garbled the message and discarded the stack.
 *
 * @returns the session directory the cookies were written to
 */
export async function saveSessionData(sessionPath: string, browser: BrowserContext, email: string, isMobile: boolean): Promise<string> {
    try {
        const cookies = await browser.cookies()
        // Fetch path
        const sessionDir = path.join(__dirname, '../browser/', sessionPath, email)
        // Create session dir
        if (!fs.existsSync(sessionDir)) {
            await fs.promises.mkdir(sessionDir, { recursive: true })
        }
        // Save cookies to a file (pretty-printed for easier manual inspection)
        await fs.promises.writeFile(
            path.join(sessionDir, `${isMobile ? 'mobile_cookies' : 'desktop_cookies'}.json`),
            JSON.stringify(cookies, null, 2)
        )
        return sessionDir
    } catch (error) {
        // Preserve the original Error (message + stack) instead of stringifying it
        if (error instanceof Error) throw error
        throw new Error(String(error))
    }
}
/**
 * Persist a generated browser fingerprint for an account/platform. Writes both
 * the corrected filename ("fingerprint") and the legacy typo ("fingerpint")
 * so older code keeps working; the legacy write is best-effort.
 *
 * Fix: errors are re-thrown as-is instead of `new Error(error as string)`,
 * which garbled the message and discarded the stack.
 *
 * @returns the session directory the fingerprint was written to
 */
export async function saveFingerprintData(sessionPath: string, email: string, isMobile: boolean, fingerprint: BrowserFingerprintWithHeaders): Promise<string> {
    try {
        // Fetch path
        const sessionDir = path.join(__dirname, '../browser/', sessionPath, email)
        // Create session dir
        if (!fs.existsSync(sessionDir)) {
            await fs.promises.mkdir(sessionDir, { recursive: true })
        }
        // Save fingerprint to files (write both legacy and corrected names for compatibility)
        const legacy = path.join(sessionDir, `${isMobile ? 'mobile_fingerpint' : 'desktop_fingerpint'}.json`)
        const correct = path.join(sessionDir, `${isMobile ? 'mobile_fingerprint' : 'desktop_fingerprint'}.json`)
        const payload = JSON.stringify(fingerprint)
        await fs.promises.writeFile(correct, payload)
        try { await fs.promises.writeFile(legacy, payload) } catch { /* ignore */ }
        return sessionDir
    } catch (error) {
        // Preserve the original Error (message + stack) instead of stringifying it
        if (error instanceof Error) throw error
        throw new Error(String(error))
    }
}

252
src/util/Logger.ts Normal file
View File

@@ -0,0 +1,252 @@
import axios from 'axios'
import chalk from 'chalk'
import { Ntfy } from './Ntfy'
import { loadConfig } from './Load'
import { DISCORD } from '../constants'
// Display name used for all live-log webhook posts
const WEBHOOK_USERNAME = 'MS Rewards - Live Logs'

/** Pending log lines for one webhook URL, plus flush bookkeeping. */
type WebhookBuffer = {
    lines: string[]
    sending: boolean
    timer?: NodeJS.Timeout
    // Fix: this field was previously written/read only through
    // `as unknown as { lastActivity: number }` double-casts; declaring it here
    // restores type safety for the idle-buffer cleanup logic.
    lastActivity?: number
}

// One buffer per webhook URL
const webhookBuffers = new Map<string, WebhookBuffer>()
// Periodic cleanup of old/idle webhook buffers to prevent memory leaks
const bufferSweeper = setInterval(() => {
    const now = Date.now()
    const BUFFER_MAX_AGE_MS = 3600000 // 1 hour idle (no lines, not sending) -> evict
    for (const [url, buf] of webhookBuffers.entries()) {
        if (!buf.sending && buf.lines.length === 0) {
            const lastActivity = (buf as unknown as { lastActivity?: number }).lastActivity || 0
            if (now - lastActivity > BUFFER_MAX_AGE_MS) {
                webhookBuffers.delete(url)
            }
        }
    }
}, 600000) // Check every 10 minutes
// Fix: without unref() this module-level interval keeps the Node event loop
// alive forever, so the process never exits once the logger is imported.
// Guarded call so this also type-checks/runs where timers return plain numbers.
const sweeperHandle = bufferSweeper as unknown as { unref?: () => void }
if (typeof sweeperHandle.unref === 'function') sweeperHandle.unref()
/** Get (or lazily create) the buffer for a webhook URL and stamp its activity time. */
function getBuffer(url: string): WebhookBuffer {
    let buf = webhookBuffers.get(url)
    if (buf === undefined) {
        buf = { lines: [], sending: false }
        webhookBuffers.set(url, buf)
    }
    // Stamp for the idle-buffer cleanup sweep
    const stamped = buf as unknown as { lastActivity: number }
    stamped.lastActivity = Date.now()
    return buf
}
/**
 * Drain a webhook buffer, posting pending lines to Discord in embed-sized
 * batches. Only one drain runs per buffer at a time (guarded by buf.sending);
 * on delivery failure the unsent chunk is put back at the front of the queue
 * and draining stops until the next flush is scheduled.
 */
async function sendBatch(url: string, buf: WebhookBuffer) {
    if (buf.sending) return
    buf.sending = true
    while (buf.lines.length > 0) {
        // Greedily pack whole lines until the next one would exceed the embed limit
        const chunk: string[] = []
        let currentLength = 0
        while (buf.lines.length > 0) {
            const next = buf.lines[0]!
            // +1 accounts for the '\n' join separator once the chunk is non-empty
            const projected = currentLength + next.length + (chunk.length > 0 ? 1 : 0)
            if (projected > DISCORD.MAX_EMBED_LENGTH && chunk.length > 0) break
            buf.lines.shift()
            chunk.push(next)
            currentLength = projected
        }
        const content = chunk.join('\n').slice(0, DISCORD.MAX_EMBED_LENGTH)
        if (!content) {
            continue
        }
        // Enhanced webhook payload with embed, username and avatar
        const payload = {
            username: WEBHOOK_USERNAME,
            avatar_url: DISCORD.AVATAR_URL,
            embeds: [{
                description: `\`\`\`\n${content}\n\`\`\``,
                color: determineColorFromContent(content),
                timestamp: new Date().toISOString()
            }]
        }
        try {
            await axios.post(url, payload, { headers: { 'Content-Type': 'application/json' }, timeout: DISCORD.WEBHOOK_TIMEOUT })
            // Small pause between posts to stay under Discord's rate limits
            await new Promise(resolve => setTimeout(resolve, DISCORD.RATE_LIMIT_DELAY))
        } catch (error) {
            // Re-queue failed batch at front and exit loop
            buf.lines = chunk.concat(buf.lines)
            console.error('[Webhook] live log delivery failed:', error)
            break
        }
    }
    buf.sending = false
}
/**
 * Pick a Discord embed color from the (case-insensitive) log content.
 * Priority order matters: a line matching both a ban marker and a warning
 * marker is colored as a ban.
 */
function determineColorFromContent(content: string): number {
    const lower = content.toLowerCase()
    const hasAny = (...needles: string[]): boolean => needles.some(n => lower.includes(n))
    // Security/Ban alerts - Red
    if (hasAny('[banned]', '[security]', 'suspended', 'compromised')) return DISCORD.COLOR_RED
    // Errors - Dark Red
    if (hasAny('[error]', '✗')) return DISCORD.COLOR_CRIMSON
    // Warnings - Orange/Yellow
    if (hasAny('[warn]', '⚠')) return DISCORD.COLOR_ORANGE
    // Success - Green
    if (hasAny('[ok]', '✓', 'complet')) return DISCORD.COLOR_GREEN
    // Info/Main - Blue
    if (hasAny('[main]')) return DISCORD.COLOR_BLUE
    // Default - Gray
    return 0x95A5A6
}
/** Queue a log line for webhook delivery; a debounce timer coalesces bursts into one flush. */
function enqueueWebhookLog(url: string, line: string) {
    const buf = getBuffer(url)
    buf.lines.push(line)
    // A flush is already scheduled; this line will piggyback on the pending timer
    if (buf.timer) return
    buf.timer = setTimeout(() => {
        buf.timer = undefined
        void sendBatch(url, buf)
    }, DISCORD.DEBOUNCE_DELAY)
}
/**
 * Synchronous logger that returns an Error when type === 'error' so callers can `throw log(...)` safely.
 *
 * Side channels (all best-effort, never throw to the caller):
 * - NTFY push for a small set of notable messages
 * - Colored console output with ASCII-safe contextual icons
 * - Live Discord webhook streaming (debounced/batched via enqueueWebhookLog)
 *
 * @param isMobile 'main' for the orchestrator, true for mobile flows, false for desktop
 * @param title Source function name (used for exclusion filters and icon selection)
 * @param message Log text; emails are redacted when logging.redactEmails is set
 * @param color Optional chalk color applied to the whole console line
 * @returns Error wrapping the formatted message when type === 'error'; otherwise undefined
 */
export function log(isMobile: boolean | 'main', title: string, message: string, type: 'log' | 'warn' | 'error' = 'log', color?: keyof typeof chalk): Error | void {
    const configData = loadConfig()
    // Access logging config with fallback for backward compatibility
    const configAny = configData as unknown as Record<string, unknown>
    const logging = configAny.logging as { excludeFunc?: string[]; logExcludeFunc?: string[] } | undefined
    const logExcludeFunc = logging?.excludeFunc ?? (configData as { logExcludeFunc?: string[] }).logExcludeFunc ?? []
    // Drop the entry entirely when the source function is excluded by config
    if (logExcludeFunc.some((x: string) => x.toLowerCase() === title.toLowerCase())) {
        return
    }
    const currentTime = new Date().toLocaleString()
    const platformText = isMobile === 'main' ? 'MAIN' : isMobile ? 'MOBILE' : 'DESKTOP'
    // Clean string for notifications (no chalk, structured)
    type LoggingCfg = { excludeFunc?: string[]; webhookExcludeFunc?: string[]; redactEmails?: boolean }
    const loggingCfg: LoggingCfg = (configAny.logging || {}) as LoggingCfg
    const shouldRedact = !!loggingCfg.redactEmails
    // Keep the first two characters of the local part, mask the rest (e.g. "jo***@example.com")
    const redact = (s: string) => shouldRedact ? s.replace(/[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,}/ig, (m) => {
        const [u, d] = m.split('@'); return `${(u||'').slice(0,2)}***@${d||''}`
    }) : s
    const cleanStr = redact(`[${currentTime}] [PID: ${process.pid}] [${type.toUpperCase()}] ${platformText} [${title}] ${message}`)
    // Define conditions for sending to NTFY
    const ntfyConditions = {
        log: [
            message.toLowerCase().includes('started tasks for account'),
            message.toLowerCase().includes('press the number'),
            message.toLowerCase().includes('no points to earn')
        ],
        error: [],
        warn: [
            message.toLowerCase().includes('aborting'),
            message.toLowerCase().includes('didn\'t gain')
        ]
    }
    // Check if the current log type and message meet the NTFY conditions
    try {
        if (type in ntfyConditions && ntfyConditions[type as keyof typeof ntfyConditions].some(condition => condition)) {
            // Fire-and-forget
            Promise.resolve(Ntfy(cleanStr, type)).catch(() => { /* ignore ntfy errors */ })
        }
    } catch { /* ignore */ }
    // Console output with better formatting and contextual icons
    const typeIndicator = type === 'error' ? '✗' : type === 'warn' ? '⚠' : '✓'
    const platformColor = isMobile === 'main' ? chalk.cyan : isMobile ? chalk.blue : chalk.magenta
    const typeColor = type === 'error' ? chalk.red : type === 'warn' ? chalk.yellow : chalk.green
    // Add contextual icon based on title/message (ASCII-safe for Windows PowerShell)
    const titleLower = title.toLowerCase()
    const msgLower = message.toLowerCase()
    // ASCII-safe icons for Windows PowerShell compatibility; first matching pattern wins
    const iconMap: Array<[RegExp, string]> = [
        [/security|compromised/i, '[SECURITY]'],
        [/ban|suspend/i, '[BANNED]'],
        [/error/i, '[ERROR]'],
        [/warn/i, '[WARN]'],
        [/success|complet/i, '[OK]'],
        [/login/i, '[LOGIN]'],
        [/point/i, '[POINTS]'],
        [/search/i, '[SEARCH]'],
        [/activity|quiz|poll/i, '[ACTIVITY]'],
        [/browser/i, '[BROWSER]'],
        [/main/i, '[MAIN]']
    ]
    let icon = ''
    for (const [pattern, symbol] of iconMap) {
        if (pattern.test(titleLower) || pattern.test(msgLower)) {
            icon = chalk.dim(symbol)
            break
        }
    }
    const iconPart = icon ? icon + ' ' : ''
    const formattedStr = [
        chalk.gray(`[${currentTime}]`),
        chalk.gray(`[${process.pid}]`),
        typeColor(`${typeIndicator}`),
        platformColor(`[${platformText}]`),
        chalk.bold(`[${title}]`),
        iconPart + redact(message)
    ].join(' ')
    const applyChalk = color && typeof chalk[color] === 'function' ? chalk[color] as (msg: string) => string : null
    // Log based on the type
    switch (type) {
        case 'warn':
            applyChalk ? console.warn(applyChalk(formattedStr)) : console.warn(formattedStr)
            break
        case 'error':
            applyChalk ? console.error(applyChalk(formattedStr)) : console.error(formattedStr)
            break
        default:
            applyChalk ? console.log(applyChalk(formattedStr)) : console.log(formattedStr)
            break
    }
    // Webhook streaming (live logs): a dedicated live URL takes priority over the general webhook
    try {
        const loggingCfg: Record<string, unknown> = (configAny.logging || {}) as Record<string, unknown>
        const webhookCfg = configData.webhook
        const liveUrlRaw = typeof loggingCfg.liveWebhookUrl === 'string' ? loggingCfg.liveWebhookUrl.trim() : ''
        const liveUrl = liveUrlRaw || (webhookCfg?.enabled && webhookCfg.url ? webhookCfg.url : '')
        const webhookExclude = Array.isArray(loggingCfg.webhookExcludeFunc) ? loggingCfg.webhookExcludeFunc : configData.webhookLogExcludeFunc || []
        const webhookExcluded = Array.isArray(webhookExclude) && webhookExclude.some((x: string) => x.toLowerCase() === title.toLowerCase())
        if (liveUrl && !webhookExcluded) {
            enqueueWebhookLog(liveUrl, cleanStr)
        }
    } catch (error) {
        console.error('[Logger] Failed to enqueue webhook log:', error)
    }
    // Return an Error when logging an error so callers can `throw log(...)`
    if (type === 'error') {
        // CommunityReporter disabled per project policy
        return new Error(cleanStr)
    }
}

27
src/util/Ntfy.ts Normal file
View File

@@ -0,0 +1,27 @@
import { loadConfig } from './Load'
import axios from 'axios'
// Per-severity NTFY delivery settings: message priority plus emoji tag.
const NOTIFICATION_TYPES = {
    error: { priority: 'max', tags: 'rotating_light' }, // Customize the ERROR icon here, see: https://docs.ntfy.sh/emojis/
    warn: { priority: 'high', tags: 'warning' }, // Customize the WARN icon here, see: https://docs.ntfy.sh/emojis/
    log: { priority: 'default', tags: 'medal_sports' } // Customize the LOG icon here, see: https://docs.ntfy.sh/emojis/
}
/**
 * Push a notification to the configured NTFY topic.
 * No-op when NTFY is disabled or incompletely configured; delivery failures
 * are swallowed because notifications are non-critical.
 * @param message Plain-text notification body
 * @param type Severity, mapped to NTFY priority/tags (defaults to 'log')
 */
export async function Ntfy(message: string, type: keyof typeof NOTIFICATION_TYPES = 'log'): Promise<void> {
    const config = loadConfig().ntfy
    if (!config?.enabled || !config.url || !config.topic) return
    try {
        const { priority, tags } = NOTIFICATION_TYPES[type]
        const headers = {
            Title: 'Microsoft Rewards Script',
            Priority: priority,
            Tags: tags,
            // Optional bearer auth for access-protected topics
            ...(config.authToken && { Authorization: `Bearer ${config.authToken}` })
        }
        // Bounded timeout so a slow/unreachable NTFY server can never stall the caller
        await axios.post(`${config.url}/${config.topic}`, message, { headers, timeout: 10000 })
    } catch {
        // Silently fail - NTFY is a non-critical notification service
    }
}

View File

@@ -0,0 +1,340 @@
import axios from 'axios'
// A single provider of search queries (e.g. trends API, Reddit scrape).
export interface QuerySource {
    name: string
    weight: number // 0-1, probability of selection
    fetchQueries: () => Promise<string[]>
}
// Tuning knobs for QueryDiversityEngine.
export interface QueryDiversityConfig {
    sources: Array<'google-trends' | 'reddit' | 'news' | 'wikipedia' | 'local-fallback'>
    deduplicate: boolean // Remove duplicate queries across sources
    mixStrategies: boolean // Mix different source types in same session
    maxQueriesPerSource: number // Cap on queries taken from each source per fetch
    cacheMinutes: number // How long per-source results are cached in memory
}
/**
 * QueryDiversityEngine fetches search queries from multiple sources to avoid patterns.
 * Supports Google Trends, Reddit, News APIs, Wikipedia, and local fallbacks.
 * Successful per-source results are cached in memory for `cacheMinutes`;
 * failed fetches are NOT cached so they can be retried on the next call.
 */
export class QueryDiversityEngine {
    private config: QueryDiversityConfig
    // source name -> cached queries with absolute expiry timestamp (ms since epoch)
    private cache: Map<string, { queries: string[]; expires: number }> = new Map()
    constructor(config?: Partial<QueryDiversityConfig>) {
        this.config = {
            sources: config?.sources || ['google-trends', 'reddit', 'local-fallback'],
            deduplicate: config?.deduplicate !== false,
            mixStrategies: config?.mixStrategies !== false,
            maxQueriesPerSource: config?.maxQueriesPerSource || 10,
            cacheMinutes: config?.cacheMinutes || 30
        }
    }
    /**
     * Fetch diverse queries from configured sources.
     * Always returns at least the local fallback list when every source fails.
     */
    async fetchQueries(count: number): Promise<string[]> {
        const allQueries: string[] = []
        for (const sourceName of this.config.sources) {
            try {
                const queries = await this.getFromSource(sourceName)
                allQueries.push(...queries.slice(0, this.config.maxQueriesPerSource))
            } catch {
                // Silently fail and try other sources
            }
        }
        // Deduplicate
        let final = this.config.deduplicate ? Array.from(new Set(allQueries)) : allQueries
        // Mix strategies: interleave queries from different sources
        if (this.config.mixStrategies && this.config.sources.length > 1) {
            final = this.interleaveQueries(final, count)
        }
        // Shuffle and limit to requested count
        final = this.shuffleArray(final).slice(0, count)
        return final.length > 0 ? final : this.getLocalFallback(count)
    }
    /**
     * Fetch from a specific source with caching.
     * Only non-empty results are cached, so a transient failure does not
     * block retries for the whole cache window.
     */
    private async getFromSource(source: string): Promise<string[]> {
        const cached = this.cache.get(source)
        if (cached && Date.now() < cached.expires) {
            return cached.queries
        }
        let queries: string[] = []
        switch (source) {
            case 'google-trends':
                queries = await this.fetchGoogleTrends()
                break
            case 'reddit':
                queries = await this.fetchReddit()
                break
            case 'news':
                queries = await this.fetchNews()
                break
            case 'wikipedia':
                queries = await this.fetchWikipedia()
                break
            case 'local-fallback':
                queries = this.getLocalFallback(20)
                break
            default:
                // Unknown source, skip silently
                break
        }
        if (queries.length > 0) {
            this.cache.set(source, {
                queries,
                expires: Date.now() + (this.config.cacheMinutes * 60000)
            })
        }
        return queries
    }
    /**
     * Fetch trending searches from Google Trends (US daily trends feed).
     * The response is prefixed with an XSSI guard that must be stripped before parsing.
     */
    private async fetchGoogleTrends(): Promise<string[]> {
        try {
            const response = await axios.get('https://trends.google.com/trends/api/dailytrends?geo=US', {
                timeout: 10000
            })
            // Strip the ")]}'," anti-JSON-hijacking prefix before JSON.parse
            const data = response.data.toString().replace(')]}\',', '')
            const parsed = JSON.parse(data)
            const queries: string[] = []
            for (const item of parsed.default.trendingSearchesDays || []) {
                for (const search of item.trendingSearches || []) {
                    if (search.title?.query) {
                        queries.push(search.title.query)
                    }
                }
            }
            return queries.slice(0, 20)
        } catch {
            return []
        }
    }
    /**
     * Fetch post titles from a random popular subreddit (hot listing).
     */
    private async fetchReddit(): Promise<string[]> {
        try {
            const subreddits = ['news', 'worldnews', 'todayilearned', 'askreddit', 'technology']
            const randomSub = subreddits[Math.floor(Math.random() * subreddits.length)]
            const response = await axios.get(`https://www.reddit.com/r/${randomSub}/hot.json?limit=15`, {
                timeout: 10000,
                headers: {
                    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
                }
            })
            const posts = response.data.data.children || []
            const queries: string[] = []
            for (const post of posts) {
                const title = post.data?.title
                // Keep only titles that look like plausible search queries
                if (title && title.length > 10 && title.length < 100) {
                    queries.push(title)
                }
            }
            return queries
        } catch {
            return []
        }
    }
    /**
     * Fetch headlines via NewsAPI.org when NEWS_API_KEY is set; otherwise
     * fall back to scraping BBC headlines.
     */
    private async fetchNews(): Promise<string[]> {
        try {
            // Using NewsAPI.org free tier (limited requests)
            const apiKey = process.env.NEWS_API_KEY
            if (!apiKey) {
                return this.fetchNewsFallback()
            }
            const response = await axios.get('https://newsapi.org/v2/top-headlines', {
                params: {
                    country: 'us',
                    pageSize: 15,
                    apiKey
                },
                timeout: 10000
            })
            const articles = response.data.articles || []
            return articles.map((a: { title?: string }) => a.title).filter((t: string | undefined) => t && t.length > 10)
        } catch {
            return this.fetchNewsFallback()
        }
    }
    /**
     * Fallback news scraper: pull <h3> headline text from the BBC News front page.
     */
    private async fetchNewsFallback(): Promise<string[]> {
        try {
            const response = await axios.get('https://www.bbc.com/news', {
                timeout: 10000,
                headers: {
                    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
                }
            })
            const html = response.data
            const regex = /<h3[^>]*>(.*?)<\/h3>/gi
            const matches: RegExpMatchArray[] = []
            let match
            while ((match = regex.exec(html)) !== null) {
                matches.push(match)
            }
            return matches
                .map(m => m[1]?.replace(/<[^>]+>/g, '').trim())
                .filter((t: string | undefined) => t && t.length > 10 && t.length < 100)
                .slice(0, 10) as string[]
        } catch {
            return []
        }
    }
    /**
     * Fetch random article titles from the Wikipedia API (main namespace only).
     */
    private async fetchWikipedia(): Promise<string[]> {
        try {
            const response = await axios.get('https://en.wikipedia.org/w/api.php', {
                params: {
                    action: 'query',
                    list: 'random',
                    rnnamespace: 0,
                    rnlimit: 15,
                    format: 'json'
                },
                timeout: 10000
            })
            const pages = response.data.query?.random || []
            return pages.map((p: { title?: string }) => p.title).filter((t: string | undefined) => t && t.length > 3)
        } catch {
            return []
        }
    }
    /**
     * Local fallback queries (curated list); always available offline.
     */
    private getLocalFallback(count: number): string[] {
        const fallback = [
            'weather forecast',
            'news today',
            'stock market',
            'sports scores',
            'movie reviews',
            'recipes',
            'travel destinations',
            'health tips',
            'technology news',
            'best restaurants near me',
            'how to cook pasta',
            'python tutorial',
            'world events',
            'climate change',
            'electric vehicles',
            'space exploration',
            'artificial intelligence',
            'cryptocurrency',
            'gaming news',
            'fashion trends',
            'fitness workout',
            'home improvement',
            'gardening tips',
            'pet care',
            'book recommendations',
            'music charts',
            'streaming shows',
            'historical events',
            'science discoveries',
            'education resources'
        ]
        return this.shuffleArray(fallback).slice(0, count)
    }
    /**
     * Interleave queries from different (guessed) sources for diversity.
     * Round-robins across source groups until targetCount is reached or
     * every group is exhausted.
     */
    private interleaveQueries(queries: string[], targetCount: number): string[] {
        const result: string[] = []
        const sourceMap = new Map<string, string[]>()
        // Group queries by estimated source (simple heuristic)
        for (const q of queries) {
            const source = this.guessSource(q)
            if (!sourceMap.has(source)) {
                sourceMap.set(source, [])
            }
            sourceMap.get(source)?.push(q)
        }
        const sources = Array.from(sourceMap.values())
        let index = 0
        while (result.length < targetCount && sources.some(s => s.length > 0)) {
            const source = sources[index % sources.length]
            if (source && source.length > 0) {
                const q = source.shift()
                if (q) result.push(q)
            }
            index++
        }
        return result
    }
    /**
     * Guess which source a query came from (basic heuristic; best-effort only).
     */
    private guessSource(query: string): string {
        if (/^[A-Z]/.test(query) && query.includes(' ')) return 'news'
        if (query.length > 80) return 'reddit'
        if (/how to|what is|why/i.test(query)) return 'local'
        return 'trends'
    }
    /**
     * Shuffle a copy of the array (Fisher-Yates); the input is not mutated.
     */
    private shuffleArray<T>(array: T[]): T[] {
        const shuffled = [...array]
        for (let i = shuffled.length - 1; i > 0; i--) {
            const j = Math.floor(Math.random() * (i + 1));
            [shuffled[i], shuffled[j]] = [shuffled[j]!, shuffled[i]!]
        }
        return shuffled
    }
    /**
     * Clear the per-source cache (call between runs).
     */
    clearCache(): void {
        this.cache.clear()
    }
}

63
src/util/Retry.ts Normal file
View File

@@ -0,0 +1,63 @@
import type { ConfigRetryPolicy } from '../interface/Config'
import Util from './Utils'
// Fully-resolved retry policy: all durations normalized to milliseconds.
type NumericPolicy = {
    maxAttempts: number // Total tries including the first call
    baseDelay: number // Initial backoff delay (ms)
    maxDelay: number // Upper bound for any single delay (ms)
    multiplier: number // Exponential growth factor between attempts
    jitter: number // +/- fraction of randomization applied to each delay
}
// Any zero-argument async operation that can be retried.
export type Retryable<T> = () => Promise<T>
/**
 * Retry runs an async operation with exponential backoff and jitter.
 * Accepts a partially-specified ConfigRetryPolicy (durations may be strings
 * like "5s") and normalizes it to a fully-numeric millisecond policy.
 */
export class Retry {
    private policy: NumericPolicy
    constructor(policy?: ConfigRetryPolicy) {
        const def: NumericPolicy = {
            maxAttempts: 3,
            baseDelay: 1000,
            maxDelay: 30000,
            multiplier: 2,
            jitter: 0.2
        }
        const merged: ConfigRetryPolicy = { ...(policy || {}) }
        // normalize string durations ("500ms", "5s", ...) to milliseconds
        const util = new Util()
        // On parse failure fall back to the matching default for that field
        // (previously always fell back to baseDelay, corrupting maxDelay)
        const parse = (v: number | string, fallback: number) => {
            if (typeof v === 'number') return v
            try { return util.stringToMs(String(v)) } catch { return fallback }
        }
        this.policy = {
            // Guard against 0/negative config: at least one attempt must run
            maxAttempts: Math.max(1, (merged.maxAttempts as number) ?? def.maxAttempts),
            baseDelay: parse(merged.baseDelay ?? def.baseDelay, def.baseDelay),
            maxDelay: parse(merged.maxDelay ?? def.maxDelay, def.maxDelay),
            multiplier: (merged.multiplier as number) ?? def.multiplier,
            jitter: (merged.jitter as number) ?? def.jitter
        }
    }
    /**
     * Execute `fn`, retrying on failure up to maxAttempts total tries.
     * @param fn Operation to run
     * @param isRetryable Optional predicate; returning false aborts retries immediately
     * @throws The last error when all attempts fail (wrapped in Error if not one)
     */
    async run<T>(fn: Retryable<T>, isRetryable?: (e: unknown) => boolean): Promise<T> {
        let attempt = 0
        let delay = this.policy.baseDelay
        let lastErr: unknown
        while (attempt < this.policy.maxAttempts) {
            try {
                return await fn()
            } catch (e) {
                lastErr = e
                attempt += 1
                const retry = isRetryable ? isRetryable(e) : true
                if (!retry || attempt >= this.policy.maxAttempts) break
                // Randomize the wait by +/- jitter to avoid synchronized retries
                const jitter = 1 + (Math.random() * 2 - 1) * this.policy.jitter
                const sleep = Math.min(this.policy.maxDelay, Math.max(0, Math.floor(delay * jitter)))
                await new Promise((r) => setTimeout(r, sleep))
                // Exponential growth, capped at maxDelay
                delay = Math.min(this.policy.maxDelay, Math.floor(delay * (this.policy.multiplier || 2)))
            }
        }
        throw lastErr instanceof Error ? lastErr : new Error(String(lastErr))
    }
}
export default Retry

177
src/util/RiskManager.ts Normal file
View File

@@ -0,0 +1,177 @@
import { AdaptiveThrottler } from './AdaptiveThrottler'
// A single observed signal affecting account risk.
export interface RiskEvent {
    type: 'captcha' | 'error' | 'timeout' | 'ban_hint' | 'success'
    timestamp: number // ms since epoch
    severity: number // 0-10, higher = worse
    context?: string // Optional free-form detail for diagnostics
}
// Snapshot of the current risk assessment.
export interface RiskMetrics {
    score: number // 0-100, higher = riskier
    level: 'safe' | 'elevated' | 'high' | 'critical'
    recommendation: string // Human-readable guidance for the current level
    delayMultiplier: number // Factor to apply to waits (includes adaptive throttling)
}
/**
 * RiskManager monitors account activity patterns and detects early ban signals.
 * Integrates with AdaptiveThrottler to dynamically adjust delays based on risk.
 * Keeps a bounded rolling buffer of events; risk is computed over a 1-hour window.
 */
export class RiskManager {
    private events: RiskEvent[] = []
    private readonly maxEvents = 100
    private readonly timeWindowMs = 3600000 // 1 hour
    private throttler: AdaptiveThrottler
    private cooldownUntil: number = 0
    constructor(throttler?: AdaptiveThrottler) {
        this.throttler = throttler || new AdaptiveThrottler()
    }
    /**
     * Record a risk event (captcha, error, success, etc.).
     * Severity is clamped to 0-10; severity >= 8 triggers an automatic cool-down.
     */
    recordEvent(type: RiskEvent['type'], severity: number, context?: string): void {
        const event: RiskEvent = {
            type,
            timestamp: Date.now(),
            severity: Math.max(0, Math.min(10, severity)),
            context
        }
        this.events.push(event)
        // Bounded buffer: drop the oldest event beyond maxEvents
        if (this.events.length > this.maxEvents) {
            this.events.shift()
        }
        // Feed success/error into adaptive throttler
        if (type === 'success') {
            this.throttler.record(true)
        } else if (['error', 'captcha', 'timeout', 'ban_hint'].includes(type)) {
            this.throttler.record(false)
        }
        // Auto cool-down on critical events
        if (severity >= 8) {
            const coolMs = Math.min(300000, severity * 30000) // max 5min
            this.cooldownUntil = Date.now() + coolMs
        }
    }
    /**
     * Calculate current risk metrics based on recent events.
     * Score combines a recency/severity-weighted average, an event-frequency
     * penalty, and extra penalties for captcha/ban-hint patterns.
     */
    assessRisk(): RiskMetrics {
        const now = Date.now()
        const recentEvents = this.events.filter(e => now - e.timestamp < this.timeWindowMs)
        if (recentEvents.length === 0) {
            return {
                score: 0,
                level: 'safe',
                recommendation: 'Normal operation',
                delayMultiplier: 1.0
            }
        }
        // Calculate base risk score (weighted by recency and severity)
        let weightedSum = 0
        let totalWeight = 0
        for (const event of recentEvents) {
            const age = now - event.timestamp
            const recencyFactor = 1 - (age / this.timeWindowMs) // newer = higher weight
            const weight = recencyFactor * (event.severity / 10)
            weightedSum += weight * event.severity
            totalWeight += weight
        }
        const baseScore = totalWeight > 0 ? (weightedSum / totalWeight) * 10 : 0
        // Penalty for rapid event frequency
        const eventRate = recentEvents.length / (this.timeWindowMs / 60000) // events per minute
        const frequencyPenalty = Math.min(30, eventRate * 5)
        // Bonus penalty for specific patterns
        const captchaCount = recentEvents.filter(e => e.type === 'captcha').length
        const banHintCount = recentEvents.filter(e => e.type === 'ban_hint').length
        const patternPenalty = (captchaCount * 15) + (banHintCount * 25)
        const finalScore = Math.min(100, baseScore + frequencyPenalty + patternPenalty)
        // Determine risk level and base multiplier from the score bands
        let level: RiskMetrics['level']
        let recommendation: string
        let delayMultiplier: number
        if (finalScore < 20) {
            level = 'safe'
            recommendation = 'Normal operation'
            delayMultiplier = 1.0
        } else if (finalScore < 40) {
            level = 'elevated'
            recommendation = 'Minor issues detected. Increasing delays slightly.'
            delayMultiplier = 1.5
        } else if (finalScore < 70) {
            level = 'high'
            recommendation = 'Significant risk detected. Applying heavy throttling.'
            delayMultiplier = 2.5
        } else {
            level = 'critical'
            recommendation = 'CRITICAL: High ban risk. Consider stopping or manual review.'
            delayMultiplier = 4.0
        }
        // Apply adaptive throttler multiplier on top
        const adaptiveMultiplier = this.throttler.getDelayMultiplier()
        delayMultiplier *= adaptiveMultiplier
        return {
            score: Math.round(finalScore),
            level,
            recommendation,
            delayMultiplier: Number(delayMultiplier.toFixed(2))
        }
    }
    /**
     * Check if currently in forced cool-down period
     */
    isInCooldown(): boolean {
        return Date.now() < this.cooldownUntil
    }
    /**
     * Get remaining cool-down time in milliseconds (0 when not cooling down)
     */
    getCooldownRemaining(): number {
        const remaining = this.cooldownUntil - Date.now()
        return Math.max(0, remaining)
    }
    /**
     * Get the adaptive throttler instance for advanced usage
     */
    getThrottler(): AdaptiveThrottler {
        return this.throttler
    }
    /**
     * Clear all events and reset state (use between accounts).
     * The throttler is deliberately NOT reset so its learning persists.
     */
    reset(): void {
        this.events = []
        this.cooldownUntil = 0
        // Keep throttler state across resets for learning
    }
    /**
     * Export events for analytics/logging
     * @param limitMinutes Only events newer than this many minutes are returned
     */
    getRecentEvents(limitMinutes: number = 60): RiskEvent[] {
        const cutoff = Date.now() - (limitMinutes * 60000)
        return this.events.filter(e => e.timestamp >= cutoff)
    }
}

View File

@@ -0,0 +1,728 @@
import fs from 'fs'
import path from 'path'
import chalk from 'chalk'
import { Config } from '../interface/Config'
import { Account } from '../interface/Account'
// A single finding produced by startup validation.
interface ValidationError {
    severity: 'error' | 'warning'
    category: string // e.g. 'accounts', 'config', 'network'
    message: string // Human-readable description of the problem
    fix?: string // Suggested remediation shown to the user
    docsLink?: string // Relative path to the relevant documentation page
}
export class StartupValidator {
private errors: ValidationError[] = []
private warnings: ValidationError[] = []
/**
* Run all validation checks before starting the bot.
* Always returns true - validation is informative, not blocking.
* Displays errors and warnings but lets execution continue.
*/
async validate(config: Config, accounts: Account[]): Promise<boolean> {
console.log(chalk.cyan('\n═══════════════════════════════════════════════════════════════'))
console.log(chalk.cyan(' 🔍 STARTUP VALIDATION - Checking Configuration'))
console.log(chalk.cyan('═══════════════════════════════════════════════════════════════\n'))
// Run all validation checks
this.validateAccounts(accounts)
this.validateConfig(config)
this.validateEnvironment()
this.validateFileSystem(config)
this.validateBrowserSettings(config)
this.validateScheduleSettings(config)
this.validateNetworkSettings(config)
this.validateWorkerSettings(config)
this.validateSearchSettings(config)
this.validateHumanizationSettings(config)
this.validateSecuritySettings(config)
// Display results (await to respect the delay)
await this.displayResults()
// Always return true - validation is informative only
// Users can proceed even with errors (they might be false positives)
return true
}
private validateAccounts(accounts: Account[]): void {
if (!accounts || accounts.length === 0) {
this.addError(
'accounts',
'No accounts found in accounts.json',
'Add at least one account to src/accounts.json or src/accounts.jsonc',
'docs/accounts.md'
)
return
}
accounts.forEach((account, index) => {
const prefix = `Account ${index + 1} (${account.email || 'unknown'})`
// Required: email
if (!account.email || typeof account.email !== 'string') {
this.addError(
'accounts',
`${prefix}: Missing or invalid email address`,
'Add a valid email address in the "email" field'
)
} else if (!/@/.test(account.email)) {
this.addError(
'accounts',
`${prefix}: Email format is invalid`,
'Email must contain @ symbol (e.g., user@example.com)'
)
}
// Required: password
if (!account.password || typeof account.password !== 'string') {
this.addError(
'accounts',
`${prefix}: Missing or invalid password`,
'Add your Microsoft account password in the "password" field'
)
} else if (account.password.length < 4) {
this.addWarning(
'accounts',
`${prefix}: Password seems too short (${account.password.length} characters)`,
'Verify this is your correct Microsoft account password'
)
}
// Required: recoveryEmail (NEW - mandatory field)
if (!account.recoveryEmail || typeof account.recoveryEmail !== 'string') {
this.addError(
'accounts',
`${prefix}: Missing required field "recoveryEmail"`,
'Add your recovery/backup email address. This is MANDATORY for security checks.\nExample: "recoveryEmail": "backup@gmail.com"',
'docs/accounts.md'
)
} else if (!/@/.test(account.recoveryEmail)) {
this.addError(
'accounts',
`${prefix}: Recovery email format is invalid`,
'Recovery email must be a valid email address (e.g., backup@gmail.com)'
)
} else if (account.recoveryEmail.trim() === '') {
this.addError(
'accounts',
`${prefix}: Recovery email cannot be empty`,
'Provide the actual recovery email associated with this Microsoft account'
)
}
// Optional but recommended: TOTP
if (!account.totp || account.totp.trim() === '') {
this.addWarning(
'accounts',
`${prefix}: No TOTP (2FA) secret configured`,
'Highly recommended: Set up 2FA and add your TOTP secret for automated login',
'docs/accounts.md'
)
} else {
const cleaned = account.totp.replace(/\s+/g, '')
if (cleaned.length < 16) {
this.addWarning(
'accounts',
`${prefix}: TOTP secret seems too short (${cleaned.length} chars)`,
'Verify you copied the complete Base32 secret from Microsoft Authenticator setup'
)
}
// Check if it's Base32 (A-Z, 2-7)
if (!/^[A-Z2-7\s]+$/i.test(account.totp)) {
this.addWarning(
'accounts',
`${prefix}: TOTP secret contains invalid characters`,
'TOTP secrets should only contain letters A-Z and numbers 2-7 (Base32 format)'
)
}
}
// Proxy validation
if (account.proxy) {
if (account.proxy.url && account.proxy.url.trim() !== '') {
if (!account.proxy.port || account.proxy.port <= 0) {
this.addError(
'accounts',
`${prefix}: Proxy URL provided but port is missing or invalid`,
'Add a valid proxy port number (e.g., 8080, 3128)'
)
}
}
}
})
}
private validateConfig(config: Config): void {
// Headless mode in Docker
if (process.env.FORCE_HEADLESS === '1' && config.headless === false) {
this.addWarning(
'config',
'FORCE_HEADLESS=1 but config.headless is false',
'Docker environment forces headless mode. Your config setting will be overridden.'
)
}
// Parallel mode warning
if (config.parallel === true) {
this.addWarning(
'config',
'Parallel mode enabled (desktop + mobile run simultaneously)',
'This uses more resources. Disable if you experience crashes or timeouts.',
'docs/config.md'
)
}
// Clusters validation
if (config.clusters > 1) {
this.addWarning(
'config',
`Clusters set to ${config.clusters} - accounts will run in parallel`,
'Ensure your system has enough resources (RAM, CPU) for concurrent execution'
)
}
// Global timeout validation
const timeout = typeof config.globalTimeout === 'string'
? config.globalTimeout
: `${config.globalTimeout}ms`
if (timeout === '0' || timeout === '0ms' || timeout === '0s') {
this.addError(
'config',
'Global timeout is set to 0',
'Set a reasonable timeout value (e.g., "30s", "60s") to prevent infinite hangs'
)
}
// Job state validation
if (config.jobState?.enabled === false) {
this.addWarning(
'config',
'Job state tracking is disabled',
'The bot will not save progress. If interrupted, all tasks will restart from scratch.',
'docs/jobstate.md'
)
}
// Risk management validation
if (config.riskManagement?.enabled === true) {
if (config.riskManagement.stopOnCritical === true) {
this.addWarning(
'config',
'Risk management will stop execution if critical risk is detected',
'Bot will halt all accounts if risk score becomes too high'
)
}
}
// Search delays validation
const minDelay = typeof config.searchSettings.searchDelay.min === 'string'
? config.searchSettings.searchDelay.min
: `${config.searchSettings.searchDelay.min}ms`
if (minDelay === '0' || minDelay === '0ms' || minDelay === '0s') {
this.addWarning(
'config',
'Search delay minimum is 0 - this may look suspicious',
'Consider setting a minimum delay (e.g., "1s", "2s") for more natural behavior'
)
}
}
private validateEnvironment(): void {
// Node.js version check
const nodeVersion = process.version
const major = parseInt(nodeVersion.split('.')[0]?.replace('v', '') || '0', 10)
if (major < 18) {
this.addError(
'environment',
`Node.js version ${nodeVersion} is too old`,
'Install Node.js 18 or newer. Visit https://nodejs.org/',
'docs/getting-started.md'
)
} else if (major < 20) {
this.addWarning(
'environment',
`Node.js version ${nodeVersion} is outdated`,
'Consider upgrading to Node.js 20+ for better performance and security'
)
}
// Docker-specific checks
if (process.env.FORCE_HEADLESS === '1') {
this.addWarning(
'environment',
'Running in Docker/containerized environment',
'Make sure volumes are correctly mounted for sessions persistence'
)
}
// Time sync warning for TOTP users
if (process.platform === 'linux') {
this.addWarning(
'environment',
'Linux detected: Ensure system time is synchronized',
'Run: sudo timedatectl set-ntp true (required for TOTP to work correctly)'
)
}
}
private validateFileSystem(config: Config): void {
// Check if sessions directory exists or can be created
const sessionPath = path.isAbsolute(config.sessionPath)
? config.sessionPath
: path.join(process.cwd(), config.sessionPath)
if (!fs.existsSync(sessionPath)) {
try {
fs.mkdirSync(sessionPath, { recursive: true })
this.addWarning(
'filesystem',
`Created missing sessions directory: ${sessionPath}`,
'Session data will be stored here'
)
} catch (error) {
this.addError(
'filesystem',
`Cannot create sessions directory: ${sessionPath}`,
`Check file permissions. Error: ${error instanceof Error ? error.message : String(error)}`
)
}
}
// Check job-state directory if enabled
if (config.jobState?.enabled !== false) {
const jobStateDir = config.jobState?.dir
? config.jobState.dir
: path.join(sessionPath, 'job-state')
if (!fs.existsSync(jobStateDir)) {
try {
fs.mkdirSync(jobStateDir, { recursive: true })
} catch (error) {
this.addWarning(
'filesystem',
`Cannot create job-state directory: ${jobStateDir}`,
'Job state tracking may fail. Check file permissions.'
)
}
}
}
// Check diagnostics directory if enabled
if (config.diagnostics?.enabled === true) {
const diagPath = path.join(process.cwd(), 'diagnostics')
if (!fs.existsSync(diagPath)) {
try {
fs.mkdirSync(diagPath, { recursive: true })
} catch (error) {
this.addWarning(
'filesystem',
'Cannot create diagnostics directory',
'Screenshots and HTML snapshots will not be saved'
)
}
}
}
}
private validateBrowserSettings(config: Config): void {
// Headless validation - only warn in Docker/containerized environments
if (!config.headless && process.env.FORCE_HEADLESS === '1') {
this.addWarning(
'browser',
'FORCE_HEADLESS=1 but config.headless is false',
'Docker environment forces headless mode. Your config setting will be overridden.',
'docs/docker.md'
)
}
// Fingerprinting validation
if (config.saveFingerprint?.desktop === false && config.saveFingerprint?.mobile === false) {
this.addWarning(
'browser',
'Fingerprint saving is completely disabled',
'Each run will generate new fingerprints, which may look suspicious'
)
}
}
private validateScheduleSettings(config: Config): void {
if (config.schedule?.enabled === true) {
// Time format validation
const schedRec = config.schedule as Record<string, unknown>
const useAmPm = schedRec.useAmPm
const time12 = typeof schedRec.time12 === 'string' ? schedRec.time12 : ''
const time24 = typeof schedRec.time24 === 'string' ? schedRec.time24 : ''
if (useAmPm === true && (!time12 || time12.trim() === '')) {
this.addError(
'schedule',
'Schedule enabled with useAmPm=true but time12 is missing',
'Add time12 field (e.g., "9:00 AM") or set useAmPm=false',
'docs/schedule.md'
)
}
if (useAmPm === false && (!time24 || time24.trim() === '')) {
this.addError(
'schedule',
'Schedule enabled with useAmPm=false but time24 is missing',
'Add time24 field (e.g., "09:00") or set useAmPm=true',
'docs/schedule.md'
)
}
// Timezone validation
const tz = config.schedule.timeZone || 'UTC'
try {
Intl.DateTimeFormat(undefined, { timeZone: tz })
} catch {
this.addError(
'schedule',
`Invalid timezone: ${tz}`,
'Use a valid IANA timezone (e.g., "America/New_York", "Europe/Paris")',
'docs/schedule.md'
)
}
// Vacation mode check
if (config.vacation?.enabled === true) {
if (config.vacation.minDays && config.vacation.maxDays) {
if (config.vacation.minDays > config.vacation.maxDays) {
this.addError(
'schedule',
`Vacation minDays (${config.vacation.minDays}) > maxDays (${config.vacation.maxDays})`,
'Set minDays <= maxDays (e.g., minDays: 2, maxDays: 4)'
)
}
}
}
}
}
private validateNetworkSettings(config: Config): void {
// Webhook validation
if (config.webhook?.enabled === true) {
if (!config.webhook.url || config.webhook.url.trim() === '') {
this.addError(
'network',
'Webhook enabled but URL is missing',
'Add webhook URL or set webhook.enabled=false',
'docs/config.md'
)
} else if (!config.webhook.url.startsWith('http')) {
this.addError(
'network',
`Invalid webhook URL: ${config.webhook.url}`,
'Webhook URL must start with http:// or https://'
)
}
}
// Conclusion webhook validation
if (config.conclusionWebhook?.enabled === true) {
if (!config.conclusionWebhook.url || config.conclusionWebhook.url.trim() === '') {
this.addError(
'network',
'Conclusion webhook enabled but URL is missing',
'Add conclusion webhook URL or disable it'
)
}
}
// NTFY validation
if (config.ntfy?.enabled === true) {
if (!config.ntfy.url || config.ntfy.url.trim() === '') {
this.addError(
'network',
'NTFY enabled but URL is missing',
'Add NTFY server URL or set ntfy.enabled=false',
'docs/ntfy.md'
)
}
if (!config.ntfy.topic || config.ntfy.topic.trim() === '') {
this.addError(
'network',
'NTFY enabled but topic is missing',
'Add NTFY topic name',
'docs/ntfy.md'
)
}
}
}
/**
 * Validate worker toggles: warn when nothing (or no search type) is enabled,
 * and when bundling is requested without its prerequisite.
 */
private validateWorkerSettings(config: Config): void {
    const workers = config.workers
    // A run with every worker off accomplishes nothing.
    const enabledFlags = [
        workers.doDailySet,
        workers.doMorePromotions,
        workers.doPunchCards,
        workers.doDesktopSearch,
        workers.doMobileSearch,
        workers.doDailyCheckIn,
        workers.doReadToEarn
    ]
    if (!enabledFlags.some(Boolean)) {
        this.addWarning(
            'workers',
            'All workers are disabled - bot will do nothing',
            'Enable at least one worker task (doDailySet, doDesktopSearch, etc.)',
            'docs/config.md'
        )
    }
    // Search points require at least one search type.
    if (!workers.doDesktopSearch && !workers.doMobileSearch) {
        this.addWarning(
            'workers',
            'Both desktop and mobile searches are disabled',
            'Enable at least one search type to earn search points'
        )
    }
    // Bundling Daily Set with search is pointless if desktop search never runs.
    if (workers.bundleDailySetWithSearch === true && !workers.doDesktopSearch) {
        this.addWarning(
            'workers',
            'bundleDailySetWithSearch is enabled but doDesktopSearch is disabled',
            'Desktop search will not run after Daily Set. Enable doDesktopSearch or disable bundling.'
        )
    }
}
private validateSearchSettings(config: Config): void {
const search = config.searchSettings
// Retry validation
if (search.retryMobileSearchAmount < 0) {
this.addWarning(
'search',
'retryMobileSearchAmount is negative',
'Set to 0 or positive number (recommended: 2-3)'
)
}
if (search.retryMobileSearchAmount > 10) {
this.addWarning(
'search',
`retryMobileSearchAmount is very high (${search.retryMobileSearchAmount})`,
'High retry count may trigger detection. Recommended: 2-3'
)
}
// Fallback validation
if (search.localFallbackCount !== undefined && search.localFallbackCount < 10) {
this.addWarning(
'search',
`localFallbackCount is low (${search.localFallbackCount})`,
'Consider at least 15-25 fallback queries for variety'
)
}
// Query diversity check
if (config.queryDiversity?.enabled === false && !config.searchOnBingLocalQueries) {
this.addWarning(
'search',
'Query diversity disabled and local queries disabled',
'Bot will only use Google Trends. Enable one query source for better variety.',
'docs/config.md'
)
}
}
/**
 * Validate humanization settings: gesture probabilities, action delays,
 * off-day counts and allowed time windows.
 *
 * Fix: previously a non-numeric actionDelay string (e.g. "fast") parsed to NaN
 * and `NaN > NaN` is false, so invalid values passed validation silently.
 * Unparseable delays are now reported as errors.
 */
private validateHumanizationSettings(config: Config): void {
    const human = config.humanization
    if (!human || human.enabled === false) {
        this.addWarning(
            'humanization',
            'Humanization is completely disabled',
            'This increases detection risk. Consider enabling for safer automation.',
            'docs/config.md'
        )
        return
    }
    // Gesture probabilities must be valid probabilities; 0 is legal but robotic.
    if (human.gestureMoveProb !== undefined) {
        if (human.gestureMoveProb < 0 || human.gestureMoveProb > 1) {
            this.addError(
                'humanization',
                `gestureMoveProb must be between 0 and 1 (got ${human.gestureMoveProb})`,
                'Set a probability value between 0.0 and 1.0'
            )
        }
        if (human.gestureMoveProb === 0) {
            this.addWarning(
                'humanization',
                'Mouse gestures disabled (gestureMoveProb=0)',
                'This may look robotic. Consider 0.3-0.7 for natural behavior.'
            )
        }
    }
    if (human.gestureScrollProb !== undefined) {
        if (human.gestureScrollProb < 0 || human.gestureScrollProb > 1) {
            this.addError(
                'humanization',
                `gestureScrollProb must be between 0 and 1 (got ${human.gestureScrollProb})`,
                'Set a probability value between 0.0 and 1.0'
            )
        }
    }
    // Action delays: accept numbers or numeric strings, reject unparseable input.
    if (human.actionDelay) {
        const rawMin = human.actionDelay.min
        const rawMax = human.actionDelay.max
        const minMs = typeof rawMin === 'string' ? parseInt(rawMin, 10) : rawMin
        const maxMs = typeof rawMax === 'string' ? parseInt(rawMax, 10) : rawMax
        if (Number.isNaN(minMs) || Number.isNaN(maxMs)) {
            // NaN would make the min/max comparison below silently pass.
            this.addError(
                'humanization',
                'actionDelay min/max could not be parsed as a number',
                `Fix: use numeric values or numeric strings (got min=${String(rawMin)}, max=${String(rawMax)})`
            )
        } else if (minMs > maxMs) {
            this.addError(
                'humanization',
                'actionDelay min is greater than max',
                `Fix: min=${minMs} should be <= max=${maxMs}`
            )
        }
    }
    // Off days per week must fit in a week.
    if (human.randomOffDaysPerWeek !== undefined) {
        if (human.randomOffDaysPerWeek < 0 || human.randomOffDaysPerWeek > 7) {
            this.addError(
                'humanization',
                `randomOffDaysPerWeek must be 0-7 (got ${human.randomOffDaysPerWeek})`,
                'Set to a value between 0 (no off days) and 7 (always off)'
            )
        }
    }
    // Allowed windows must be "HH:mm-HH:mm" strings; format mismatch is only a warning.
    if (human.allowedWindows && Array.isArray(human.allowedWindows)) {
        human.allowedWindows.forEach((window, idx) => {
            if (typeof window !== 'string') {
                this.addError(
                    'humanization',
                    `allowedWindows[${idx}] is not a string`,
                    'Format: "HH:mm-HH:mm" (e.g., "09:00-17:00")'
                )
            } else if (!/^\d{2}:\d{2}-\d{2}:\d{2}$/.test(window)) {
                this.addWarning(
                    'humanization',
                    `allowedWindows[${idx}] format may be invalid: "${window}"`,
                    'Expected format: "HH:mm-HH:mm" (24-hour, e.g., "09:00-17:00")'
                )
            }
        })
    }
}
/**
 * Validate privacy/safety settings: log redaction, proxy coverage
 * and crash-recovery restart limits.
 */
private validateSecuritySettings(config: Config): void {
    // Logs shared publicly should not contain raw email addresses.
    const logging = config.logging as { redactEmails?: boolean } | undefined
    if (logging?.redactEmails === false) {
        this.addWarning(
            'security',
            'Email redaction is disabled in logs',
            'Enable redactEmails=true if you share logs publicly',
            'docs/security.md'
        )
    }
    // Removed diagnostics warning - reports/ folder with masked emails is safe for debugging
    // With both proxies off, every external API call exposes the real IP.
    const proxy = config.proxy
    if (proxy?.proxyGoogleTrends === false && proxy?.proxyBingTerms === false) {
        this.addWarning(
            'security',
            'All external API calls will use your real IP',
            'Consider enabling proxy for Google Trends or Bing Terms to mask your IP'
        )
    }
    // Aggressive auto-restart on crashes can look like abuse.
    if (config.crashRecovery?.autoRestart === true) {
        const maxRestarts = config.crashRecovery.maxRestarts ?? 2
        if (maxRestarts > 5) {
            this.addWarning(
                'security',
                `Crash recovery maxRestarts is high (${maxRestarts})`,
                'Excessive restarts on errors may trigger rate limits or detection'
            )
        }
    }
}
/** Record a blocking configuration problem for later display. */
private addError(category: string, message: string, fix?: string, docsLink?: string): void {
    const issue = { severity: 'error' as const, category, message, fix, docsLink }
    this.errors.push(issue)
}
/** Record a non-blocking configuration concern for later display. */
private addWarning(category: string, message: string, fix?: string, docsLink?: string): void {
    const issue = { severity: 'warning' as const, category, message, fix, docsLink }
    this.warnings.push(issue)
}
/**
 * Print all collected validation errors and warnings with their fixes and
 * doc links, followed by a summary banner. When any issue was found the
 * method pauses 5 seconds so the user can read it; it never aborts the run.
 */
private async displayResults(): Promise<void> {
    // Display errors
    if (this.errors.length > 0) {
        console.log(chalk.red('\n❌ VALIDATION ERRORS FOUND:\n'))
        this.errors.forEach((err, index) => {
            console.log(chalk.red(` ${index + 1}. [${err.category.toUpperCase()}] ${err.message}`))
            if (err.fix) {
                console.log(chalk.yellow(` 💡 Fix: ${err.fix}`))
            }
            if (err.docsLink) {
                console.log(chalk.cyan(` 📖 Documentation: ${err.docsLink}`))
            }
            console.log('')
        })
    }
    // Display warnings
    if (this.warnings.length > 0) {
        console.log(chalk.yellow('\n⚠ WARNINGS:\n'))
        this.warnings.forEach((warn, index) => {
            console.log(chalk.yellow(` ${index + 1}. [${warn.category.toUpperCase()}] ${warn.message}`))
            if (warn.fix) {
                console.log(chalk.gray(` 💡 Suggestion: ${warn.fix}`))
            }
            if (warn.docsLink) {
                console.log(chalk.cyan(` 📖 Documentation: ${warn.docsLink}`))
            }
            console.log('')
        })
    }
    // Summary
    console.log(chalk.cyan('═══════════════════════════════════════════════════════════════'))
    if (this.errors.length === 0 && this.warnings.length === 0) {
        console.log(chalk.green(' ✅ All validation checks passed! Configuration looks good.'))
        console.log(chalk.gray(' → Starting bot execution...'))
    } else {
        console.log(chalk.white(` Found: ${chalk.red(`${this.errors.length} error(s)`)} | ${chalk.yellow(`${this.warnings.length} warning(s)`)}`))
        if (this.errors.length > 0) {
            // Errors are reported but deliberately non-fatal: the run proceeds.
            console.log(chalk.red('\n ⚠️ CRITICAL ERRORS DETECTED'))
            console.log(chalk.white(' → Bot will continue, but these issues may cause failures'))
            console.log(chalk.white(' → Review errors above and fix them for stable operation'))
            console.log(chalk.gray(' → If you believe these are false positives, you can ignore them'))
        } else {
            console.log(chalk.yellow('\n ⚠️ Warnings detected - review recommended'))
            console.log(chalk.gray(' → Bot will continue normally'))
        }
        console.log(chalk.white('\n 📖 Full documentation: docs/index.md'))
        console.log(chalk.gray(' → Proceeding with execution in 5 seconds...'))
        // Give user time to read (5 seconds for errors, 5 seconds for warnings)
        await new Promise(resolve => setTimeout(resolve, 5000))
    }
    console.log(chalk.cyan('═══════════════════════════════════════════════════════════════\n'))
}
}

84
src/util/Totp.ts Normal file
View File

@@ -0,0 +1,84 @@
import crypto from 'crypto'
/**
 * Decode an RFC 4648 Base32 string into raw bytes.
 * Case-insensitive; trailing '=' padding and any non-alphabet characters
 * are stripped before decoding.
 */
function base32Decode(input: string): Buffer {
    const ALPHABET = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567'
    const normalized = input.toUpperCase().replace(/=+$/g, '').replace(/[^A-Z2-7]/g, '')
    const out: number[] = []
    let acc = 0
    let accBits = 0
    for (let i = 0; i < normalized.length; i++) {
        const idx = ALPHABET.indexOf(normalized.charAt(i))
        if (idx < 0) continue // unreachable after normalization; kept as a safety net
        // Shift in 5 bits per symbol; emit a byte whenever 8+ bits accumulate.
        acc = (acc << 5) | idx
        accBits += 5
        while (accBits >= 8) {
            accBits -= 8
            out.push((acc >>> accBits) & 0xff)
        }
    }
    return Buffer.from(out)
}
/**
 * Compute an HMAC digest using Node's crypto module.
 * @param algorithm - digest name accepted by createHmac (e.g. 'sha1', 'sha256')
 */
function hmac(algorithm: string, key: Buffer, data: Buffer): Buffer {
    const mac = crypto.createHmac(algorithm, key)
    mac.update(data)
    return mac.digest()
}
export type TotpOptions = { digits?: number; step?: number; algorithm?: 'SHA1' | 'SHA256' | 'SHA512' }
/**
 * Generate a TOTP code per RFC 6238 (built on RFC 4226 HOTP).
 * @param secretBase32 - shared secret encoded in Base32
 * @param time - Unix time in seconds (defaults to now)
 * @param options - digits (default 6), step in seconds (default 30), algorithm (default SHA1)
 * @returns zero-padded numeric one-time password
 * @throws on unsupported algorithm or malformed HMAC output
 */
export function generateTOTP(
    secretBase32: string,
    time: number = Math.floor(Date.now() / 1000),
    options?: TotpOptions
): string {
    const digits = options?.digits ?? 6
    const step = options?.step ?? 30
    const algorithm = (options?.algorithm ?? 'SHA1').toUpperCase()
    const algMap: Record<string, string> = { SHA1: 'sha1', SHA256: 'sha256', SHA512: 'sha512' }
    const nodeAlg = algMap[algorithm]
    if (!nodeAlg) {
        throw new Error('Unsupported algorithm. Use SHA1, SHA256 or SHA512.')
    }
    const key = base32Decode(secretBase32)
    // 8-byte big-endian moving factor (time-step counter).
    const counterBuffer = Buffer.alloc(8)
    counterBuffer.writeBigUInt64BE(BigInt(Math.floor(time / step)), 0)
    const digest = hmac(nodeAlg, key, counterBuffer)
    // Minimal sanity check; the shortest supported digest (SHA1) is 20 bytes.
    if (!digest || digest.length < 20) {
        throw new Error('Invalid HMAC output for TOTP')
    }
    // RFC 4226 dynamic truncation: low nibble of the last byte picks a 4-byte window.
    const offset = digest[digest.length - 1]! & 0x0f
    if (offset + 3 >= digest.length) {
        throw new Error('Invalid dynamic truncation offset')
    }
    const binary =
        ((digest[offset]! & 0x7f) << 24) |
        ((digest[offset + 1]! & 0xff) << 16) |
        ((digest[offset + 2]! & 0xff) << 8) |
        (digest[offset + 3]! & 0xff)
    return (binary % 10 ** digits).toString().padStart(digits, '0')
}

300
src/util/UserAgent.ts Normal file
View File

@@ -0,0 +1,300 @@
import axios from 'axios'
import { BrowserFingerprintWithHeaders } from 'fingerprint-generator'
import { log } from './Logger'
import Retry from './Retry'
import { ChromeVersion, EdgeVersion, Architecture, Platform } from '../interface/UserAgentUtil'
// Fixed GREASE brand version used in the Sec-CH-UA brand lists below.
const NOT_A_BRAND_VERSION = '99'
// Microsoft's public Edge release-metadata endpoint.
const EDGE_VERSION_URL = 'https://edgeupdates.microsoft.com/api/products'
// Successful lookups are cached for one hour to avoid hammering the API.
const EDGE_VERSION_CACHE_TTL_MS = 1000 * 60 * 60
// Static fallback versions (updated periodically, valid as of October 2024)
const FALLBACK_EDGE_VERSIONS: EdgeVersionResult = {
    android: '130.0.2849.66',
    windows: '130.0.2849.68'
}
// Latest stable Edge version per platform; either field may be absent.
type EdgeVersionResult = {
    android?: string
    windows?: string
}
// Module-level cache plus de-duplication of concurrent fetches.
let edgeVersionCache: { data: EdgeVersionResult; expiresAt: number } | null = null
let edgeVersionInFlight: Promise<EdgeVersionResult> | null = null
/**
 * Build a realistic Edge user-agent string plus matching client-hint metadata.
 * @param isMobile - true for Android Edge (EdgA token), false for Windows desktop Edge (Edg token)
 */
export async function getUserAgent(isMobile: boolean) {
    const system = getSystemComponents(isMobile)
    const app = await getAppComponents(isMobile)
    // Mobile UAs carry "Mobile Safari" and the EdgA brand; desktop uses Edg.
    const engineSuffix = isMobile
        ? `Chrome/${app.chrome_reduced_version} Mobile Safari/537.36 EdgA/${app.edge_version}`
        : `Chrome/${app.chrome_reduced_version} Safari/537.36 Edg/${app.edge_version}`
    const userAgent = `Mozilla/5.0 (${system}) AppleWebKit/537.36 (KHTML, like Gecko) ${engineSuffix}`
    // Randomized OS version for Sec-CH-UA-Platform-Version (Android 9-13, Windows 1-15).
    const majorOsVersion = isMobile
        ? Math.floor(Math.random() * 5) + 9
        : Math.floor(Math.random() * 15) + 1
    const userAgentMetadata = {
        mobile: isMobile,
        isMobile,
        platform: isMobile ? 'Android' : 'Windows',
        fullVersionList: [
            { brand: 'Not/A)Brand', version: `${NOT_A_BRAND_VERSION}.0.0.0` },
            { brand: 'Microsoft Edge', version: app.edge_version },
            { brand: 'Chromium', version: app.chrome_version }
        ],
        brands: [
            { brand: 'Not/A)Brand', version: NOT_A_BRAND_VERSION },
            { brand: 'Microsoft Edge', version: app.edge_major_version },
            { brand: 'Chromium', version: app.chrome_major_version }
        ],
        platformVersion: `${majorOsVersion}.0.0`,
        architecture: isMobile ? '' : 'x86',
        bitness: isMobile ? '' : '64',
        model: '',
        uaFullVersion: app.chrome_version
    }
    return { userAgent, userAgentMetadata }
}
/**
 * Fetch the current stable Chrome version from the Chrome-for-Testing feed.
 * @throws rethrows (via log) on network or parsing failure
 */
export async function getChromeVersion(isMobile: boolean): Promise<string> {
    try {
        const response = await axios({
            url: 'https://googlechromelabs.github.io/chrome-for-testing/last-known-good-versions.json',
            method: 'GET',
            headers: {
                'Content-Type': 'application/json'
            }
        })
        const payload: ChromeVersion = response.data
        return payload.channels.Stable.version
    } catch (error) {
        throw log(isMobile, 'USERAGENT-CHROME-VERSION', 'An error occurred:' + error, 'error')
    }
}
/**
 * Resolve the latest stable Edge versions (Android + Windows).
 * Uses a 1-hour module-level cache and de-duplicates concurrent callers via a
 * shared in-flight promise. Never rejects: on failure it serves the stale
 * cache if present, otherwise the static pinned fallback versions.
 */
export async function getEdgeVersions(isMobile: boolean): Promise<EdgeVersionResult> {
    const now = Date.now()
    // Return cached version if still valid
    if (edgeVersionCache && edgeVersionCache.expiresAt > now) {
        return edgeVersionCache.data
    }
    // Wait for in-flight request if one exists
    if (edgeVersionInFlight) {
        try {
            return await edgeVersionInFlight
        } catch (error) {
            // NOTE(review): the in-flight promise created below always resolves
            // (its .catch returns a fallback), so this branch looks unreachable —
            // kept as a defensive guard.
            if (edgeVersionCache) {
                log(isMobile, 'USERAGENT-EDGE-VERSION', 'Using cached Edge versions after in-flight failure', 'warn')
                return edgeVersionCache.data
            }
            // Fall through to fetch attempt below
        }
    }
    // Attempt to fetch fresh versions
    const fetchPromise = fetchEdgeVersionsWithRetry(isMobile)
        .then(result => {
            edgeVersionCache = { data: result, expiresAt: Date.now() + EDGE_VERSION_CACHE_TTL_MS }
            edgeVersionInFlight = null
            return result
        })
        .catch(() => {
            edgeVersionInFlight = null
            // Try stale cache first
            if (edgeVersionCache) {
                log(isMobile, 'USERAGENT-EDGE-VERSION', 'Using stale cached Edge versions due to fetch failure', 'warn')
                return edgeVersionCache.data
            }
            // Fall back to static versions
            // The fallback is cached too, so a dead API is not re-hit on every call.
            log(isMobile, 'USERAGENT-EDGE-VERSION', 'Using static fallback Edge versions (API unavailable)', 'warn')
            edgeVersionCache = { data: FALLBACK_EDGE_VERSIONS, expiresAt: Date.now() + EDGE_VERSION_CACHE_TTL_MS }
            return FALLBACK_EDGE_VERSIONS
        })
    edgeVersionInFlight = fetchPromise
    return fetchPromise
}
/**
 * Produce the OS segment of the UA string.
 * Mobile: random Android major version 10-14; desktop: fixed Windows 10 x64 token.
 */
export function getSystemComponents(mobile: boolean): string {
    if (!mobile) {
        return 'Windows NT 10.0; Win64; x64'
    }
    const androidMajor = Math.floor(Math.random() * 5) + 10
    return `Linux; Android ${androidMajor}; K`
}
export async function getAppComponents(isMobile: boolean) {
const versions = await getEdgeVersions(isMobile)
const edgeVersion = isMobile ? versions.android : versions.windows as string
const edgeMajorVersion = edgeVersion?.split('.')[0]
const chromeVersion = await getChromeVersion(isMobile)
const chromeMajorVersion = chromeVersion?.split('.')[0]
const chromeReducedVersion = `${chromeMajorVersion}.0.0.0`
return {
not_a_brand_version: `${NOT_A_BRAND_VERSION}.0.0.0`,
not_a_brand_major_version: NOT_A_BRAND_VERSION,
edge_version: edgeVersion as string,
edge_major_version: edgeMajorVersion as string,
chrome_version: chromeVersion as string,
chrome_major_version: chromeMajorVersion as string,
chrome_reduced_version: chromeReducedVersion as string
}
}
/**
 * Fetch Edge versions through the shared Retry helper; an attempt that
 * yields neither platform version counts as a failure and is retried.
 */
async function fetchEdgeVersionsWithRetry(isMobile: boolean): Promise<EdgeVersionResult> {
    const retry = new Retry()
    const attempt = async (): Promise<EdgeVersionResult> => {
        const result = await fetchEdgeVersionsOnce(isMobile)
        if (!result.android && !result.windows) {
            throw new Error('Stable Edge releases did not include Android or Windows versions')
        }
        return result
    }
    // Second argument: every error is considered retryable.
    return retry.run(attempt, () => true)
}
/**
 * Single attempt to download Edge release metadata.
 * Tries axios first, then the native fetch API as a secondary transport.
 * @throws when both transports fail
 */
async function fetchEdgeVersionsOnce(isMobile: boolean): Promise<EdgeVersionResult> {
    let lastError: unknown = null
    // Primary transport: axios.
    try {
        const response = await axios<EdgeVersion[]>({
            url: EDGE_VERSION_URL,
            method: 'GET',
            headers: {
                'Content-Type': 'application/json',
                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
            },
            timeout: 10000,
            validateStatus: (status) => status === 200
        })
        const payload = response.data
        if (!payload || !Array.isArray(payload)) {
            throw new Error('Invalid response format from Edge API')
        }
        return mapEdgeVersions(payload)
    } catch (axiosError) {
        lastError = axiosError
    }
    // Secondary transport: native fetch.
    try {
        const fromFetch = await tryNativeFetchFallback()
        if (fromFetch) {
            log(isMobile, 'USERAGENT-EDGE-VERSION', 'Axios failed, using native fetch fallback', 'warn')
            return fromFetch
        }
    } catch (fetchError) {
        lastError = fetchError
    }
    // Both transports failed; surface the most recent error.
    const errorMsg = lastError instanceof Error ? lastError.message : String(lastError)
    throw new Error(`Failed to fetch Edge versions: ${errorMsg}`)
}
/**
 * Fetch Edge release metadata with the native fetch API (10s abort timeout).
 * Returns null on any failure instead of throwing.
 */
async function tryNativeFetchFallback(): Promise<EdgeVersionResult | null> {
    const controller = new AbortController()
    const timeoutHandle = setTimeout(() => controller.abort(), 10000)
    try {
        const response = await fetch(EDGE_VERSION_URL, {
            headers: {
                'Content-Type': 'application/json',
                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
            },
            signal: controller.signal
        })
        if (!response.ok) {
            throw new Error(`HTTP ${response.status}`)
        }
        const payload = await response.json() as EdgeVersion[]
        if (!Array.isArray(payload)) {
            throw new Error('Invalid response format')
        }
        return mapEdgeVersions(payload)
    } catch {
        return null
    } finally {
        // finally guarantees the abort timer is cleared on every path.
        clearTimeout(timeoutHandle)
    }
}
/**
 * Extract the stable-channel Android and Windows (x64 preferred) Edge
 * versions from the raw products payload.
 * @throws when the payload is empty, the stable channel is missing, or no version is found
 */
function mapEdgeVersions(data: EdgeVersion[]): EdgeVersionResult {
    if (!Array.isArray(data) || data.length === 0) {
        throw new Error('Edge API returned empty or invalid data')
    }
    // Prefer an exact 'stable' product name, then any product containing "stable".
    let stable = data.find(entry => entry?.Product?.toLowerCase() === 'stable')
    if (!stable) {
        stable = data.find(entry => entry?.Product !== undefined && /stable/i.test(entry.Product))
    }
    const releases = stable?.Releases
    if (!stable || !releases || !Array.isArray(releases)) {
        throw new Error('Stable Edge channel not found or invalid format')
    }
    const androidRelease = releases.find(release =>
        release?.Platform === Platform.Android && release?.ProductVersion
    )
    // Windows: x64 build preferred, any Windows build accepted as fallback.
    const windowsX64 = releases.find(release =>
        release?.Platform === Platform.Windows &&
        release?.Architecture === Architecture.X64 &&
        release?.ProductVersion
    )
    const windowsAny = releases.find(release =>
        release?.Platform === Platform.Windows && release?.ProductVersion
    )
    const result: EdgeVersionResult = {
        android: androidRelease?.ProductVersion,
        windows: (windowsX64 ?? windowsAny)?.ProductVersion
    }
    if (!result.android && !result.windows) {
        throw new Error('No valid Edge versions found in API response')
    }
    return result
}
export async function updateFingerprintUserAgent(fingerprint: BrowserFingerprintWithHeaders, isMobile: boolean): Promise<BrowserFingerprintWithHeaders> {
try {
const userAgentData = await getUserAgent(isMobile)
const componentData = await getAppComponents(isMobile)
fingerprint.fingerprint.navigator.userAgentData = userAgentData.userAgentMetadata
fingerprint.fingerprint.navigator.userAgent = userAgentData.userAgent
fingerprint.fingerprint.navigator.appVersion = userAgentData.userAgent.replace(`${fingerprint.fingerprint.navigator.appCodeName}/`, '')
fingerprint.headers['user-agent'] = userAgentData.userAgent
fingerprint.headers['sec-ch-ua'] = `"Microsoft Edge";v="${componentData.edge_major_version}", "Not=A?Brand";v="${componentData.not_a_brand_major_version}", "Chromium";v="${componentData.chrome_major_version}"`
fingerprint.headers['sec-ch-ua-full-version-list'] = `"Microsoft Edge";v="${componentData.edge_version}", "Not=A?Brand";v="${componentData.not_a_brand_version}", "Chromium";v="${componentData.chrome_version}"`
return fingerprint
} catch (error) {
const errorMsg = error instanceof Error ? error.message : String(error)
log(isMobile, 'USER-AGENT-UPDATE', `Failed to update fingerprint: ${errorMsg}`, 'error')
throw new Error(`User-Agent update failed: ${errorMsg}`)
}
}

73
src/util/Utils.ts Normal file
View File

@@ -0,0 +1,73 @@
import ms from 'ms'
/**
 * Small general-purpose helpers: waiting, date formatting, array utilities
 * and human-duration parsing.
 */
export default class Util {
    /**
     * Sleep for the given number of milliseconds.
     * Negative values are clamped to 0 and anything above 1 hour is capped,
     * so a bad caller value can never freeze the bot indefinitely.
     */
    async wait(ms: number): Promise<void> {
        const MAX_WAIT_MS = 3600000 // 1 hour max
        const safeMs = Math.min(Math.max(0, ms), MAX_WAIT_MS)
        if (ms !== safeMs) {
            console.warn(`[Utils] wait() clamped from ${ms}ms to ${safeMs}ms (max: ${MAX_WAIT_MS}ms)`)
        }
        return new Promise<void>((resolve) => {
            setTimeout(resolve, safeMs)
        })
    }

    /** Sleep for a uniformly random duration in [minMs, maxMs]. */
    async waitRandom(minMs: number, maxMs: number): Promise<void> {
        return this.wait(this.randomNumber(minMs, maxMs))
    }

    /** Format a timestamp (default: now) as MM/DD/YYYY in local time. */
    getFormattedDate(ms = Date.now()): string {
        const today = new Date(ms)
        const month = String(today.getMonth() + 1).padStart(2, '0') // January is 0
        const day = String(today.getDate()).padStart(2, '0')
        const year = today.getFullYear()
        return `${month}/${day}/${year}`
    }

    /** Return a new array with the elements of `array` in random order. */
    shuffleArray<T>(array: T[]): T[] {
        return array.map(value => ({ value, sort: Math.random() }))
            .sort((a, b) => a.sort - b.sort)
            .map(({ value }) => value)
    }

    /** Random integer in the inclusive range [min, max]. */
    randomNumber(min: number, max: number): number {
        return Math.floor(Math.random() * (max - min + 1)) + min
    }

    /**
     * Split `arr` into at most `numChunks` contiguous chunks of near-equal size.
     * @throws when numChunks <= 0
     */
    chunkArray<T>(arr: T[], numChunks: number): T[][] {
        // Validate input to prevent division by zero or invalid chunks
        if (numChunks <= 0) {
            throw new Error(`Invalid numChunks: ${numChunks}. Must be a positive integer.`)
        }
        if (arr.length === 0) {
            return []
        }
        const safeNumChunks = Math.max(1, Math.floor(numChunks))
        const chunkSize = Math.ceil(arr.length / safeNumChunks)
        const chunks: T[][] = []
        for (let i = 0; i < arr.length; i += chunkSize) {
            chunks.push(arr.slice(i, i + chunkSize))
        }
        return chunks
    }

    /**
     * Parse a human-readable duration ("1m", "30s", 1000) into milliseconds.
     * Fix: the old `if (!milisec)` check also rejected the valid result 0
     * (e.g. "0ms"); only undefined/NaN now count as parse failures.
     * @throws when the input cannot be parsed by `ms`
     */
    stringToMs(input: string | number): number {
        const parsed = ms(input.toString())
        if (parsed === undefined || typeof parsed !== 'number' || Number.isNaN(parsed)) {
            throw new Error('The string provided cannot be parsed to a valid time! Use a format like "1 min", "1m" or "1 minutes"')
        }
        return parsed
    }
}