Mirror of https://github.com/LightZirconite/Microsoft-Rewards-Bot.git
Synced 2026-01-18 12:43:57 +00:00

Commit: New structure
src/util/network/Axios.ts (161 lines, new file)
@@ -0,0 +1,161 @@
import axios, { AxiosError, AxiosInstance, AxiosRequestConfig, AxiosResponse } from 'axios'
import { HttpProxyAgent } from 'http-proxy-agent'
import { HttpsProxyAgent } from 'https-proxy-agent'
import { SocksProxyAgent } from 'socks-proxy-agent'
import { AccountProxy } from '../../interface/Account'

class AxiosClient {
    private instance: AxiosInstance
    private account: AccountProxy

    constructor(account: AccountProxy) {
        this.account = account
        this.instance = axios.create()

        // If a proxy configuration is provided, set up the agent
        if (this.account.url && this.account.proxyAxios) {
            const agent = this.getAgentForProxy(this.account)
            this.instance.defaults.httpAgent = agent
            this.instance.defaults.httpsAgent = agent
        }
    }

    private getAgentForProxy(proxyConfig: AccountProxy): HttpProxyAgent<string> | HttpsProxyAgent<string> | SocksProxyAgent {
        const { proxyUrl, protocol } = this.buildProxyUrl(proxyConfig)
        const normalized = protocol.replace(/:$/, '').toLowerCase()

        switch (normalized) {
            case 'http':
                return new HttpProxyAgent(proxyUrl)
            case 'https':
                return new HttpsProxyAgent(proxyUrl)
            case 'socks':
            case 'socks4':
            case 'socks5':
                return new SocksProxyAgent(proxyUrl)
            default:
                throw new Error(`Unsupported proxy protocol in "${proxyConfig.url}". Supported: http://, https://, socks://, socks4://, socks5://`)
        }
    }

    private buildProxyUrl(proxyConfig: AccountProxy): { proxyUrl: string; protocol: string } {
        const { url, port, username, password } = proxyConfig

        if (!url) {
            throw new Error('Proxy URL is required when proxyAxios is enabled.')
        }

        // Default to http:// when the configured URL carries no scheme
        const hasScheme = /^[a-zA-Z][a-zA-Z0-9+.-]*:/.test(url)
        const candidate = hasScheme ? url : `http://${url}`

        let parsedUrl: URL
        try {
            parsedUrl = new URL(candidate)
        } catch (err) {
            throw new Error(`Invalid proxy URL "${url}": ${(err as Error).message}`)
        }

        const protocol = parsedUrl.protocol.replace(/:$/, '')
        const allowed = new Set(['http', 'https', 'socks', 'socks4', 'socks5'])
        if (!allowed.has(protocol)) {
            throw new Error(`Unsupported proxy protocol in "${url}". Supported: http://, https://, socks://, socks4://, socks5://`)
        }

        if (!parsedUrl.port) {
            if (port) {
                parsedUrl.port = String(port)
            } else {
                throw new Error(`Proxy port missing for "${url}". Provide a port value.`)
            }
        }

        if (username) {
            parsedUrl.username = encodeURIComponent(username)
        }

        if (password) {
            parsedUrl.password = encodeURIComponent(password)
        }

        return { proxyUrl: parsedUrl.toString(), protocol: parsedUrl.protocol }
    }

    // Generic method to make any Axios request with retry logic
    public async request(config: AxiosRequestConfig, bypassProxy = false): Promise<AxiosResponse> {
        if (bypassProxy) {
            const bypassInstance = axios.create()
            return bypassInstance.request(config)
        }

        // Initialize lastError to prevent throwing undefined
        let lastError: unknown = new Error('Request failed with unknown error')
        const maxAttempts = 2

        for (let attempt = 1; attempt <= maxAttempts; attempt++) {
            try {
                return await this.instance.request(config)
            } catch (err: unknown) {
                lastError = err

                // Handle HTTP 407 Proxy Authentication Required
                if (this.isProxyAuthError(err)) {
                    // Retry without proxy on auth failure
                    const bypassInstance = axios.create()
                    return bypassInstance.request(config)
                }

                // Handle retryable network errors
                if (this.isRetryableError(err)) {
                    if (attempt < maxAttempts) {
                        // Exponential backoff: 1s, 2s, 4s, etc.
                        const delayMs = 1000 * Math.pow(2, attempt - 1)
                        await this.sleep(delayMs)
                        continue
                    }
                    // Last attempt: try without proxy
                    const bypassInstance = axios.create()
                    return bypassInstance.request(config)
                }

                // Non-retryable error
                throw err
            }
        }

        throw lastError
    }

    /**
     * Check if error is HTTP 407 Proxy Authentication Required
     */
    private isProxyAuthError(err: unknown): boolean {
        const axiosErr = err as AxiosError | undefined
        return axiosErr?.response?.status === 407
    }

    /**
     * Check if error is retryable (network/proxy issues)
     */
    private isRetryableError(err: unknown): boolean {
        const e = err as { code?: string; cause?: { code?: string }; message?: string } | undefined
        if (!e) return false

        const code = e.code || e.cause?.code
        const isNetworkError = code === 'ECONNREFUSED' ||
            code === 'ETIMEDOUT' ||
            code === 'ECONNRESET' ||
            code === 'ENOTFOUND' ||
            code === 'EPIPE'

        const msg = String(e.message || '')
        const isProxyIssue = /proxy|tunnel|socks|agent/i.test(msg)

        return isNetworkError || isProxyIssue
    }

    private sleep(ms: number): Promise<void> {
        return new Promise(resolve => setTimeout(resolve, ms))
    }
}

export default AxiosClient
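For reference, a minimal usage sketch of the new client (not part of the commit). The import paths and the exact shape of AccountProxy are assumptions based on the fields the constructor and buildProxyUrl read (url, port, username, password, proxyAxios); the real interface lives under src/interface/ and may carry more fields.

import AxiosClient from './src/util/network/Axios'
import { AccountProxy } from './src/interface/Account'

async function main(): Promise<void> {
    // Hypothetical proxy config: the scheme defaults to http:// when omitted,
    // and the separate port is used only if the URL itself has none.
    const proxy: AccountProxy = {
        url: 'socks5://127.0.0.1',
        port: 1080,
        username: 'user',
        password: 'p@ss word', // percent-encoded by buildProxyUrl
        proxyAxios: true
    }

    const client = new AxiosClient(proxy)

    // Routed through the SOCKS agent; transient network errors are retried
    // once with backoff, and HTTP 407 falls back to a direct connection.
    const res = await client.request({ url: 'https://api.ipify.org?format=json' })
    console.log(res.data)

    // bypassProxy = true skips the proxied instance entirely
    await client.request({ url: 'https://example.com' }, true)
}

main().catch(console.error)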
src/util/network/QueryDiversityEngine.ts (342 lines, new file)
@@ -0,0 +1,342 @@
import axios from 'axios'
import { Util } from '../core/Utils'

export interface QueryDiversityConfig {
    sources: Array<'google-trends' | 'reddit' | 'news' | 'wikipedia' | 'local-fallback'>
    deduplicate: boolean
    mixStrategies: boolean
    maxQueriesPerSource: number
    cacheMinutes: number
}

/**
 * QueryDiversityEngine fetches search queries from multiple sources to avoid patterns.
 * Supports Google Trends, Reddit, News APIs, Wikipedia, and local fallbacks.
 */
export class QueryDiversityEngine {
    private config: QueryDiversityConfig
    private cache: Map<string, { queries: string[]; expires: number }> = new Map()
    private util: Util = new Util()
    private logger?: (source: string, message: string, level?: 'info' | 'warn' | 'error') => void

    constructor(config?: Partial<QueryDiversityConfig>, logger?: (source: string, message: string, level?: 'info' | 'warn' | 'error') => void) {
        // Clamp tunables to sane bounds: 1-50 queries per source, 1 minute to 24 hours of cache
        const maxQueriesPerSource = Math.max(1, Math.min(config?.maxQueriesPerSource || 10, 50))
        const cacheMinutes = Math.max(1, Math.min(config?.cacheMinutes || 30, 1440))

        this.config = {
            sources: config?.sources && config.sources.length > 0
                ? config.sources
                : ['google-trends', 'reddit', 'local-fallback'],
            deduplicate: config?.deduplicate !== false,
            mixStrategies: config?.mixStrategies !== false,
            maxQueriesPerSource,
            cacheMinutes
        }
        this.logger = logger
    }

    private log(source: string, message: string, level: 'info' | 'warn' | 'error' = 'info'): void {
        if (this.logger) {
            this.logger(source, message, level)
        }
    }

    /**
     * Generic HTTP fetch with error handling and timeout
     */
    private async fetchHttp(url: string, config?: {
        method?: 'GET' | 'POST'
        headers?: Record<string, string>
        data?: string
        timeout?: number
    }): Promise<string> {
        try {
            const response = await axios({
                url,
                method: config?.method || 'GET',
                headers: config?.headers || { 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36' },
                data: config?.data,
                timeout: config?.timeout || 10000
            })
            return typeof response.data === 'string' ? response.data : JSON.stringify(response.data)
        } catch (error) {
            const errorMsg = error instanceof Error ? error.message : String(error)
            this.log('QUERY-FETCH', `HTTP request failed for ${url}: ${errorMsg}`, 'error')
            throw error
        }
    }

    /**
     * Fetch diverse queries from configured sources
     */
    async fetchQueries(count: number): Promise<string[]> {
        const validCount = Math.max(1, Math.min(count, 200))
        const allQueries: string[] = []

        for (const sourceName of this.config.sources) {
            try {
                const queries = await this.getFromSource(sourceName)
                allQueries.push(...queries.slice(0, this.config.maxQueriesPerSource))
            } catch (error) {
                const errorMsg = error instanceof Error ? error.message : String(error)
                this.log('QUERY-DIVERSITY', `Failed to fetch from ${sourceName}: ${errorMsg}`, 'warn')
            }
        }

        let final = this.config.deduplicate ? Array.from(new Set(allQueries)) : allQueries

        if (this.config.mixStrategies && this.config.sources.length > 1) {
            final = this.interleaveQueries(final, validCount)
        }

        final = this.util.shuffleArray(final).slice(0, validCount)

        if (final.length === 0) {
            this.log('QUERY-DIVERSITY', 'All sources failed, using local fallback', 'warn')
            return this.getLocalFallback(validCount)
        }

        return final
    }

    /**
     * Fetch from a specific source with caching
     */
    private async getFromSource(source: string): Promise<string[]> {
        this.cleanExpiredCache()

        const cached = this.cache.get(source)
        if (cached && Date.now() < cached.expires) {
            return cached.queries
        }

        let queries: string[] = []

        switch (source) {
            case 'google-trends':
                queries = await this.fetchGoogleTrends()
                break
            case 'reddit':
                queries = await this.fetchReddit()
                break
            case 'news':
                queries = await this.fetchNews()
                break
            case 'wikipedia':
                queries = await this.fetchWikipedia()
                break
            case 'local-fallback':
                queries = this.getLocalFallback(20)
                break
            default:
                this.log('QUERY-DIVERSITY', `Unknown source: ${source}`, 'warn')
                break
        }

        if (queries.length > 0) {
            this.cache.set(source, {
                queries,
                expires: Date.now() + (this.config.cacheMinutes * 60000)
            })
        }

        return queries
    }

    /**
     * Fetch from Google Trends (US daily trending searches)
     */
    private async fetchGoogleTrends(): Promise<string[]> {
        try {
            const data = await this.fetchHttp('https://trends.google.com/trends/api/dailytrends?geo=US')
            // Strip the anti-JSON-hijacking prefix before parsing
            const cleaned = data.replace(')]}\',', '')
            const parsed = JSON.parse(cleaned)

            const queries: string[] = []
            for (const item of parsed.default.trendingSearchesDays || []) {
                for (const search of item.trendingSearches || []) {
                    if (search.title?.query) {
                        queries.push(search.title.query)
                    }
                }
            }

            return queries.slice(0, 20)
        } catch {
            return []
        }
    }

    /**
     * Fetch from Reddit (hot posts from popular subreddits)
     */
    private async fetchReddit(): Promise<string[]> {
        try {
            const subreddits = ['news', 'worldnews', 'todayilearned', 'askreddit', 'technology']
            const randomSub = subreddits[Math.floor(Math.random() * subreddits.length)]

            const data = await this.fetchHttp(`https://www.reddit.com/r/${randomSub}/hot.json?limit=15`)
            const parsed = JSON.parse(data)
            const posts = parsed.data?.children || []
            const queries: string[] = []

            for (const post of posts) {
                const title = post.data?.title
                if (title && title.length > 10 && title.length < 100) {
                    queries.push(title)
                }
            }

            return queries
        } catch {
            return []
        }
    }

    /**
     * Fetch from News API (requires API key - falls back to headline scraping)
     */
    private async fetchNews(): Promise<string[]> {
        try {
            const apiKey = process.env.NEWS_API_KEY
            if (!apiKey) {
                return this.fetchNewsFallback()
            }

            const data = await this.fetchHttp(`https://newsapi.org/v2/top-headlines?country=us&pageSize=15&apiKey=${apiKey}`)
            const parsed = JSON.parse(data)
            const articles = parsed.articles || []
            return articles
                .map((a: { title?: string }) => a.title)
                .filter((t: string | undefined): t is string => !!t && t.length > 10)
        } catch {
            return this.fetchNewsFallback()
        }
    }

    /**
     * Fallback news scraper (BBC headlines)
     */
    private async fetchNewsFallback(): Promise<string[]> {
        try {
            const html = await this.fetchHttp('https://www.bbc.com/news')
            const regex = /<h3[^>]*>(.*?)<\/h3>/gi
            const matches: RegExpMatchArray[] = []
            let match
            while ((match = regex.exec(html)) !== null) {
                matches.push(match)
            }

            return matches
                .map(m => m[1]?.replace(/<[^>]+>/g, '').trim())
                .filter((t): t is string => !!t && t.length > 10 && t.length < 100)
                .slice(0, 10)
        } catch {
            return []
        }
    }

    /**
     * Fetch from Wikipedia (random article titles)
     */
    private async fetchWikipedia(): Promise<string[]> {
        try {
            const data = await this.fetchHttp('https://en.wikipedia.org/w/api.php?action=query&list=random&rnnamespace=0&rnlimit=15&format=json')
            const parsed = JSON.parse(data)
            const pages = parsed.query?.random || []
            return pages
                .map((p: { title?: string }) => p.title)
                .filter((t: string | undefined): t is string => !!t && t.length > 3)
        } catch {
            return []
        }
    }

    /**
     * Local fallback queries (curated list)
     */
    private getLocalFallback(count: number): string[] {
        const fallback = [
            'weather forecast',
            'news today',
            'stock market',
            'sports scores',
            'movie reviews',
            'recipes',
            'travel destinations',
            'health tips',
            'technology news',
            'best restaurants near me',
            'how to cook pasta',
            'python tutorial',
            'world events',
            'climate change',
            'electric vehicles',
            'space exploration',
            'artificial intelligence',
            'cryptocurrency',
            'gaming news',
            'fashion trends',
            'fitness workout',
            'home improvement',
            'gardening tips',
            'pet care',
            'book recommendations',
            'music charts',
            'streaming shows',
            'historical events',
            'science discoveries',
            'education resources'
        ]

        return this.util.shuffleArray(fallback).slice(0, count)
    }

    /**
     * Interleave queries from different sources for diversity
     * Uses a simple round-robin approach based on order of sources in config
     */
    private interleaveQueries(queries: string[], targetCount: number): string[] {
        const sourceCount = this.config.sources.length
        const chunkSize = this.config.maxQueriesPerSource

        if (sourceCount === 0 || queries.length === 0) {
            return queries.slice(0, targetCount)
        }

        // Queries arrive grouped by source (at most chunkSize per source),
        // so split the flat list back into per-source chunks
        const chunks: string[][] = []
        for (let start = 0; start < queries.length; start += chunkSize) {
            chunks.push(queries.slice(start, start + chunkSize))
        }

        // Round-robin: take one query from each chunk in turn
        const result: string[] = []
        for (let offset = 0; offset < chunkSize && result.length < targetCount; offset++) {
            for (const chunk of chunks) {
                if (result.length >= targetCount) break
                const query = chunk[offset]
                if (query) {
                    result.push(query)
                }
            }
        }

        return result.slice(0, targetCount)
    }

    /**
     * Clear cache (call between runs)
     */
    clearCache(): void {
        this.cache.clear()
    }

    /**
     * Clean expired entries from cache automatically
     */
    private cleanExpiredCache(): void {
        const now = Date.now()
        for (const [key, value] of this.cache.entries()) {
            if (now >= value.expires) {
                this.cache.delete(key)
            }
        }
    }
}
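A usage sketch for the engine, again not part of the commit; the import path is an assumption. The logger callback matches the optional signature the constructor accepts, and fetchQueries clamps its argument to 1..200 internally.

import { QueryDiversityEngine } from './src/util/network/QueryDiversityEngine'

async function main(): Promise<void> {
    const engine = new QueryDiversityEngine(
        // Partial config: omitted fields fall back to the constructor defaults
        { sources: ['google-trends', 'reddit', 'local-fallback'], maxQueriesPerSource: 10 },
        (source, message, level = 'info') => console.log(`[${level}] ${source}: ${message}`)
    )

    // Pulls up to 25 deduplicated, interleaved, shuffled queries;
    // per-source results are cached for cacheMinutes (default 30).
    const queries = await engine.fetchQueries(25)
    console.log(queries)

    // Drop cached source results between runs
    engine.clearCache()
}

main().catch(console.error)

Note the failure behavior: each source that throws is logged at 'warn' and skipped, and only if every source returns nothing does the engine fall back to the curated local list.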