Formatting + General Improvements

- Added cookie header function
- Added fallback to HTML parsing for dashboard data
- Removed some logs
This commit is contained in:
TheNetsky
2026-01-21 22:48:28 +01:00
parent 7e51bff52b
commit 175ba45543
23 changed files with 252 additions and 390 deletions

View File

@@ -1,51 +1,51 @@
name: Build and Push Docker Image on Release
on:
release:
types: [published]
workflow_dispatch:
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
jobs:
build-and-push:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Log in to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=raw,value=latest,enable={{is_default_branch}}
- name: Build and push Docker image
uses: docker/build-push-action@v5
with:
context: .
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=gha
cache-to: type=gha,mode=max
name: Build and Push Docker Image on Release
on:
release:
types: [published]
workflow_dispatch:
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
jobs:
build-and-push:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Log in to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=raw,value=latest,enable={{is_default_branch}}
- name: Build and push Docker image
uses: docker/build-push-action@v5
with:
context: .
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=gha
cache-to: type=gha,mode=max

143
README.md
View File

@@ -3,6 +3,7 @@
---
## Table of Contents
- [Quick Setup](#quick-setup)
- [Nix Setup](#nix-setup)
- [Configuration Options](#configuration-options)
@@ -11,20 +12,25 @@
- [Disclaimer](#disclaimer)
---
## Quick Setup
**Requirements:** Node.js >= 24 and Git
Works on Windows, Linux, macOS, and WSL.
### Get the script
```bash
git clone https://github.com/TheNetsky/Microsoft-Rewards-Script.git
cd Microsoft-Rewards-Script
```
Or, download the latest release ZIP and extract it.
### Create an account.json and config.json
Copy, rename, and edit your account and configuration files before deploying the script.
- Copy or rename `src/accounts.example.json` to `src/accounts.json` and add your credentials
- Copy or rename `src/config.example.json` to `src/config.json` and customize your preferences.
@@ -33,9 +39,10 @@ Copy, rename, and edit your account and configuration files before deploying the
> Prior versions of accounts.json and config.json are not compatible with the current release.
> [!WARNING]
> You must rebuild your script after making any changes to accounts.json and config.json.
> You must rebuild your script after making any changes to accounts.json and config.json.
### Build and run the script (bare metal version)
```bash
npm run pre-build
npm run build
@@ -43,6 +50,7 @@ npm run start
```
### Build and run the script (docker version)
```bash
docker compose up -d
```
@@ -53,6 +61,7 @@ docker compose up -d
> [!TIP]
> When headless, monitor logs with `docker logs microsoft-rewards-script` (for example, to view passwordless codes), or enable a webhook service in the `src/config.json`.
---
## Nix Setup
@@ -65,86 +74,91 @@ If using Nix: `bash scripts/nix/run.sh`
Edit `src/config.json` to customize behavior. Below are all currently available options.
> [!WARNING]
> [!WARNING]
> Rebuild the script after all changes.
### Core
| Setting | Type | Default | Description |
|----------|------|----------|-------------|
| `baseURL` | string | `"https://rewards.bing.com"` | Microsoft Rewards base URL |
| `sessionPath` | string | `"sessions"` | Directory to store browser sessions |
| `headless` | boolean | `false` | Run browser invisibly |
| `runOnZeroPoints` | boolean | `false` | Run even when no points are available |
| `clusters` | number | `1` | Number of concurrent account clusters |
| `errorDiagnostics` | boolean | `false` | Enable error diagnostics |
| `searchOnBingLocalQueries` | boolean | `false` | Use local query list |
| `globalTimeout` | string | `"30sec"` | Timeout for all actions |
| Setting | Type | Default | Description |
| -------------------------- | ------- | ---------------------------- | ------------------------------------- |
| `baseURL` | string | `"https://rewards.bing.com"` | Microsoft Rewards base URL |
| `sessionPath` | string | `"sessions"` | Directory to store browser sessions |
| `headless` | boolean | `false` | Run browser invisibly |
| `runOnZeroPoints` | boolean | `false` | Run even when no points are available |
| `clusters` | number | `1` | Number of concurrent account clusters |
| `errorDiagnostics` | boolean | `false` | Enable error diagnostics |
| `searchOnBingLocalQueries` | boolean | `false` | Use local query list |
| `globalTimeout` | string | `"30sec"` | Timeout for all actions |
> [!CAUTION]
> Set `headless` to `true` when using docker
### Workers
| Setting | Type | Default | Description |
|----------|------|----------|-------------|
| `workers.doDailySet` | boolean | `true` | Complete daily set |
| `workers.doSpecialPromotions` | boolean | `true` | Complete special promotions |
| `workers.doMorePromotions` | boolean | `true` | Complete more promotions |
| `workers.doPunchCards` | boolean | `true` | Complete punchcards |
| `workers.doAppPromotions` | boolean | `true` | Complete app promotions |
| `workers.doDesktopSearch` | boolean | `true` | Perform desktop searches |
| `workers.doMobileSearch` | boolean | `true` | Perform mobile searches |
| `workers.doDailyCheckIn` | boolean | `true` | Complete daily check-in |
| `workers.doReadToEarn` | boolean | `true` | Complete Read-to-Earn |
| Setting | Type | Default | Description |
| ----------------------------- | ------- | ------- | --------------------------- |
| `workers.doDailySet` | boolean | `true` | Complete daily set |
| `workers.doSpecialPromotions` | boolean | `true` | Complete special promotions |
| `workers.doMorePromotions` | boolean | `true` | Complete more promotions |
| `workers.doPunchCards` | boolean | `true` | Complete punchcards |
| `workers.doAppPromotions` | boolean | `true` | Complete app promotions |
| `workers.doDesktopSearch` | boolean | `true` | Perform desktop searches |
| `workers.doMobileSearch` | boolean | `true` | Perform mobile searches |
| `workers.doDailyCheckIn` | boolean | `true` | Complete daily check-in |
| `workers.doReadToEarn` | boolean | `true` | Complete Read-to-Earn |
### Search Settings
| Setting | Type | Default | Description |
|----------|------|----------|-------------|
| `searchSettings.scrollRandomResults` | boolean | `false` | Scroll randomly on results |
| `searchSettings.clickRandomResults` | boolean | `false` | Click random links |
| `searchSettings.parallelSearching` | boolean | `true` | Run searches in parallel |
| `searchSettings.queryEngines` | string[] | `["google", "wikipedia", "reddit", "local"]` | Query engines to use |
| `searchSettings.searchResultVisitTime` | string | `"10sec"` | Time to spend on each search result |
| `searchSettings.searchDelay.min` | string | `"30sec"` | Minimum delay between searches |
| `searchSettings.searchDelay.max` | string | `"1min"` | Maximum delay between searches |
| `searchSettings.readDelay.min` | string | `"30sec"` | Minimum delay for reading |
| `searchSettings.readDelay.max` | string | `"1min"` | Maximum delay for reading |
| Setting | Type | Default | Description |
| -------------------------------------- | -------- | -------------------------------------------- | ----------------------------------- |
| `searchSettings.scrollRandomResults` | boolean | `false` | Scroll randomly on results |
| `searchSettings.clickRandomResults` | boolean | `false` | Click random links |
| `searchSettings.parallelSearching` | boolean | `true` | Run searches in parallel |
| `searchSettings.queryEngines` | string[] | `["google", "wikipedia", "reddit", "local"]` | Query engines to use |
| `searchSettings.searchResultVisitTime` | string | `"10sec"` | Time to spend on each search result |
| `searchSettings.searchDelay.min` | string | `"30sec"` | Minimum delay between searches |
| `searchSettings.searchDelay.max` | string | `"1min"` | Maximum delay between searches |
| `searchSettings.readDelay.min` | string | `"30sec"` | Minimum delay for reading |
| `searchSettings.readDelay.max` | string | `"1min"` | Maximum delay for reading |
### Logging
| Setting | Type | Default | Description |
|----------|------|----------|-------------|
| `debugLogs` | boolean | `false` | Enable debug logging |
| `consoleLogFilter.enabled` | boolean | `false` | Enable console log filtering |
| `consoleLogFilter.mode` | string | `"whitelist"` | Filter mode (whitelist/blacklist) |
| `consoleLogFilter.levels` | string[] | `["error", "warn"]` | Log levels to filter |
| `consoleLogFilter.keywords` | string[] | `["starting account"]` | Keywords to filter |
| `consoleLogFilter.regexPatterns` | string[] | `[]` | Regex patterns for filtering |
| Setting | Type | Default | Description |
| -------------------------------- | -------- | ---------------------- | --------------------------------- |
| `debugLogs` | boolean | `false` | Enable debug logging |
| `consoleLogFilter.enabled` | boolean | `false` | Enable console log filtering |
| `consoleLogFilter.mode` | string | `"whitelist"` | Filter mode (whitelist/blacklist) |
| `consoleLogFilter.levels` | string[] | `["error", "warn"]` | Log levels to filter |
| `consoleLogFilter.keywords` | string[] | `["starting account"]` | Keywords to filter |
| `consoleLogFilter.regexPatterns` | string[] | `[]` | Regex patterns for filtering |
### Proxy
| Setting | Type | Default | Description |
|----------|------|----------|-------------|
| `proxy.queryEngine` | boolean | `true` | Proxy query engine requests |
| Setting | Type | Default | Description |
| ------------------- | ------- | ------- | --------------------------- |
| `proxy.queryEngine` | boolean | `true` | Proxy query engine requests |
### Webhooks
| Setting | Type | Default | Description |
|----------|------|----------|-------------|
| `webhook.discord.enabled` | boolean | `false` | Enable Discord webhook |
| `webhook.discord.url` | string | `""` | Discord webhook URL |
| `webhook.ntfy.enabled` | boolean | `false` | Enable ntfy notifications |
| `webhook.ntfy.url` | string | `""` | ntfy server URL |
| `webhook.ntfy.topic` | string | `""` | ntfy topic |
| `webhook.ntfy.token` | string | `""` | ntfy authentication token |
| `webhook.ntfy.title` | string | `"Microsoft-Rewards-Script"` | Notification title |
| `webhook.ntfy.tags` | string[] | `["bot", "notify"]` | Notification tags |
| `webhook.ntfy.priority` | number | `3` | Notification priority (1-5) |
| `webhook.webhookLogFilter.enabled` | boolean | `false` | Enable webhook log filtering |
| `webhook.webhookLogFilter.mode` | string | `"whitelist"` | Filter mode (whitelist/blacklist) |
| `webhook.webhookLogFilter.levels` | string[] | `["error"]` | Log levels to send |
| `webhook.webhookLogFilter.keywords` | string[] | `["starting account", "select number", "collected"]` | Keywords to filter |
| `webhook.webhookLogFilter.regexPatterns` | string[] | `[]` | Regex patterns for filtering |
| Setting | Type | Default | Description |
| ---------------------------------------- | -------- | ---------------------------------------------------- | --------------------------------- |
| `webhook.discord.enabled` | boolean | `false` | Enable Discord webhook |
| `webhook.discord.url` | string | `""` | Discord webhook URL |
| `webhook.ntfy.enabled` | boolean | `false` | Enable ntfy notifications |
| `webhook.ntfy.url` | string | `""` | ntfy server URL |
| `webhook.ntfy.topic` | string | `""` | ntfy topic |
| `webhook.ntfy.token` | string | `""` | ntfy authentication token |
| `webhook.ntfy.title` | string | `"Microsoft-Rewards-Script"` | Notification title |
| `webhook.ntfy.tags` | string[] | `["bot", "notify"]` | Notification tags |
| `webhook.ntfy.priority` | number | `3` | Notification priority (1-5) |
| `webhook.webhookLogFilter.enabled` | boolean | `false` | Enable webhook log filtering |
| `webhook.webhookLogFilter.mode` | string | `"whitelist"` | Filter mode (whitelist/blacklist) |
| `webhook.webhookLogFilter.levels` | string[] | `["error"]` | Log levels to send |
| `webhook.webhookLogFilter.keywords` | string[] | `["starting account", "select number", "collected"]` | Keywords to filter |
| `webhook.webhookLogFilter.regexPatterns` | string[] | `[]` | Regex patterns for filtering |
> [!WARNING]
> **NTFY** users set the `webhookLogFilter` to `enabled`, or you will receive push notifications for *all* logs.
> **NTFY** users set the `webhookLogFilter` to `enabled`, or you will receive push notifications for _all_ logs.
> When enabled, only account start, 2FA codes, and account completion summaries are delivered as push notifications.
> Customize which notifications you receive with the `keywords` options.
@@ -152,7 +166,7 @@ Edit `src/config.json` to customize behavior. Below are all currently available
## Account Setup
Edit `src/accounts.json`.
Edit `src/accounts.json`.
> [!WARNING]
> The file is a **flat array** of accounts, not `{ "accounts": [ ... ] }`.
@@ -208,10 +222,11 @@ Edit `src/accounts.json`.
> When using 2FA login, adding your `totpSecret` will enable the script to automatically generate and enter the timed 6 digit code to login. To get your `totpSecret` in your Microsoft Security settings, click 'Manage how you sign in'. Add Authenticator app, when shown the QR code, select 'enter code manually'. Use this code in the `accounts.json`.
---
## Troubleshooting
> [!TIP]
> Most login issues can be fixed by deleting your /sessions folder, and redeploying the script
> Most login issues can be fixed by deleting your /sessions folder, and redeploying the script
---

View File

@@ -165,4 +165,4 @@ async function main() {
})
}
main()
main()

View File

@@ -1,12 +1,6 @@
import path from 'path'
import fs from 'fs'
import {
getDirname,
getProjectRoot,
log,
loadJsonFile,
safeRemoveDirectory
} from '../utils.js'
import { getDirname, getProjectRoot, log, loadJsonFile, safeRemoveDirectory } from '../utils.js'
const __dirname = getDirname(import.meta.url)
const projectRoot = getProjectRoot(__dirname)
@@ -64,4 +58,4 @@ if (!success) {
process.exit(1)
}
log('INFO', 'Done.')
log('INFO', 'Done.')

View File

@@ -94,10 +94,7 @@ export function loadJsonFile(possiblePaths, required = true) {
export function loadConfig(projectRoot, isDev = false) {
const possiblePaths = isDev
? [path.join(projectRoot, 'src', 'config.json')]
: [
path.join(projectRoot, 'dist', 'config.json'),
path.join(projectRoot, 'config.json')
]
: [path.join(projectRoot, 'dist', 'config.json'), path.join(projectRoot, 'config.json')]
const result = loadJsonFile(possiblePaths, true)
@@ -121,17 +118,20 @@ export function loadAccounts(projectRoot, isDev = false) {
const possiblePaths = isDev
? [path.join(projectRoot, 'src', 'accounts.dev.json')]
: [
path.join(projectRoot, 'dist', 'accounts.json'),
path.join(projectRoot, 'accounts.json'),
path.join(projectRoot, 'accounts.example.json')
]
path.join(projectRoot, 'dist', 'accounts.json'),
path.join(projectRoot, 'accounts.json'),
path.join(projectRoot, 'accounts.example.json')
]
return loadJsonFile(possiblePaths, true)
}
export function findAccountByEmail(accounts, email) {
if (!email || typeof email !== 'string') return null
return accounts.find(a => a?.email && typeof a.email === 'string' && a.email.toLowerCase() === email.toLowerCase()) || null
return (
accounts.find(a => a?.email && typeof a.email === 'string' && a.email.toLowerCase() === email.toLowerCase()) ||
null
)
}
export function getRuntimeBase(projectRoot, isDev = false) {
@@ -266,4 +266,4 @@ export function safeRemoveDirectory(dirPath, projectRoot) {
log('ERROR', `Error: ${error.message}`)
return false
}
}
}

View File

@@ -37,4 +37,4 @@
"desktop": false
}
}
]
]

View File

@@ -1,5 +1,5 @@
import type { BrowserContext, Cookie } from 'patchright'
import type { AxiosRequestConfig, AxiosResponse } from 'axios'
import type { AxiosRequestConfig } from 'axios'
import type { MicrosoftRewardsBot } from '../index'
import { saveSessionData } from '../util/Load'
@@ -23,45 +23,56 @@ export default class BrowserFunc {
*/
async getDashboardData(): Promise<DashboardData> {
try {
const allowedDomains = ['bing.com', 'live.com', 'microsoftonline.com'];
const cookieHeader = [
...new Map(
this.bot.cookies.mobile
.filter(
(c: { name: string; value: string; domain?: string }) =>
typeof c.domain === 'string' &&
allowedDomains.some(d =>
c.domain && c.domain.toLowerCase().endsWith(d)
)
)
.map(c => [c.name, c]) // dedupe by name, keep last
).values()
]
.map(c => `${c.name}=${c.value}`)
.join('; ');
const request: AxiosRequestConfig = {
url: 'https://rewards.bing.com/api/getuserinfo?type=1',
method: 'GET',
headers: {
...(this.bot.fingerprint?.headers ?? {}),
Cookie: cookieHeader,
Cookie: this.buildCookieHeader(this.bot.cookies.mobile, [
'bing.com',
'live.com',
'microsoftonline.com'
]),
Referer: 'https://rewards.bing.com/',
Origin: 'https://rewards.bing.com'
}
}
const response = await this.bot.axios.request(request)
return response.data.dashboard as DashboardData
if (response.data?.dashboard) {
return response.data.dashboard as DashboardData
}
throw new Error('Dashboard data missing from API response')
} catch (error) {
this.bot.logger.info(
this.bot.isMobile,
'GET-DASHBOARD-DATA',
`Error fetching dashboard data: ${error instanceof Error ? error.message : String(error)}`
)
throw error
this.bot.logger.warn(this.bot.isMobile, 'GET-DASHBOARD-DATA', 'API failed, trying HTML fallback')
// Try using script from dashboard page
try {
const request: AxiosRequestConfig = {
url: this.bot.config.baseURL,
method: 'GET',
headers: {
...(this.bot.fingerprint?.headers ?? {}),
Cookie: this.buildCookieHeader(this.bot.cookies.mobile),
Referer: 'https://rewards.bing.com/',
Origin: 'https://rewards.bing.com'
}
}
const response = await this.bot.axios.request(request)
const match = response.data.match(/var\s+dashboard\s*=\s*({.*?});/s)
if (!match?.[1]) {
throw new Error('Dashboard script not found in HTML')
}
return JSON.parse(match[1]) as DashboardData
} catch (fallbackError) {
// If both fail
this.bot.logger.error(this.bot.isMobile, 'GET-DASHBOARD-DATA', 'Failed to get dashboard data')
throw fallbackError
}
}
}
@@ -84,7 +95,7 @@ export default class BrowserFunc {
const response = await this.bot.axios.request(request)
return response.data as AppDashboardData
} catch (error) {
this.bot.logger.info(
this.bot.logger.error(
this.bot.isMobile,
'GET-APP-DASHBOARD-DATA',
`Error fetching dashboard data: ${error instanceof Error ? error.message : String(error)}`
@@ -112,7 +123,7 @@ export default class BrowserFunc {
const response = await this.bot.axios.request(request)
return response.data as XboxDashboardData
} catch (error) {
this.bot.logger.info(
this.bot.logger.error(
this.bot.isMobile,
'GET-XBOX-DASHBOARD-DATA',
`Error fetching dashboard data: ${error instanceof Error ? error.message : String(error)}`
@@ -308,141 +319,21 @@ export default class BrowserFunc {
}
}
mergeCookies(response: AxiosResponse, currentCookieHeader: string = '', whitelist?: string[]): string {
const cookieMap = new Map<string, string>(
currentCookieHeader
.split(';')
.map(pair => pair.split('=').map(s => s.trim()))
.filter(([name, value]) => name && value)
.map(([name, value]) => [name, value] as [string, string])
)
const setCookieList = [response.headers['set-cookie']].flat().filter(Boolean) as string[]
const cookiesByName = new Map(this.bot.cookies.mobile.map(c => [c.name, c]))
for (const setCookie of setCookieList) {
const [nameValue, ...attributes] = setCookie.split(';').map(s => s.trim())
if (!nameValue) continue
const [name, value] = nameValue.split('=').map(s => s.trim())
if (!name) continue
if (whitelist && !whitelist?.includes(name)) {
continue
}
const attrs = this.parseAttributes(attributes)
const existing = cookiesByName.get(name)
if (!value) {
if (existing) {
cookiesByName.delete(name)
this.bot.cookies.mobile = this.bot.cookies.mobile.filter(c => c.name !== name)
}
cookieMap.delete(name)
continue
}
if (attrs.expires !== undefined && attrs.expires < Date.now() / 1000) {
if (existing) {
cookiesByName.delete(name)
this.bot.cookies.mobile = this.bot.cookies.mobile.filter(c => c.name !== name)
}
cookieMap.delete(name)
continue
}
cookieMap.set(name, value)
if (existing) {
this.updateCookie(existing, value, attrs)
} else {
this.bot.cookies.mobile.push(this.createCookie(name, value, attrs))
}
}
return Array.from(cookieMap, ([name, value]) => `${name}=${value}`).join('; ')
}
private parseAttributes(attributes: string[]) {
const attrs: {
domain?: string
path?: string
expires?: number
httpOnly?: boolean
secure?: boolean
sameSite?: Cookie['sameSite']
} = {}
for (const attr of attributes) {
const [key, val] = attr.split('=').map(s => s?.trim())
const lowerKey = key?.toLowerCase()
switch (lowerKey) {
case 'domain':
case 'path': {
if (val) attrs[lowerKey] = val
break
}
case 'expires': {
if (val) {
const ts = Date.parse(val)
if (!isNaN(ts)) attrs.expires = Math.floor(ts / 1000)
}
break
}
case 'max-age': {
if (val) {
const maxAge = Number(val)
if (!isNaN(maxAge)) attrs.expires = Math.floor(Date.now() / 1000) + maxAge
}
break
}
case 'httponly': {
attrs.httpOnly = true
break
}
case 'secure': {
attrs.secure = true
break
}
case 'samesite': {
const normalized = val?.toLowerCase()
if (normalized && ['lax', 'strict', 'none'].includes(normalized)) {
attrs.sameSite = (normalized.charAt(0).toUpperCase() +
normalized.slice(1)) as Cookie['sameSite']
}
break
}
}
}
return attrs
}
private updateCookie(cookie: Cookie, value: string, attrs: ReturnType<typeof this.parseAttributes>) {
cookie.value = value
if (attrs.domain) cookie.domain = attrs.domain
if (attrs.path) cookie.path = attrs.path
//if (attrs.expires !== undefined) cookie.expires = attrs.expires
//if (attrs.httpOnly) cookie.httpOnly = true
//if (attrs.secure) cookie.secure = true
//if (attrs.sameSite) cookie.sameSite = attrs.sameSite
}
private createCookie(name: string, value: string, attrs: ReturnType<typeof this.parseAttributes>): Cookie {
return {
name,
value,
domain: attrs.domain || '.bing.com',
path: attrs.path || '/'
/*
...(attrs.expires !== undefined && { expires: attrs.expires }),
...(attrs.httpOnly && { httpOnly: true }),
...(attrs.secure && { secure: true }),
...(attrs.sameSite && { sameSite: attrs.sameSite })
*/
} as Cookie
buildCookieHeader(cookies: Cookie[], allowedDomains?: string[]): string {
return [
...new Map(
cookies
.filter(c => {
if (!allowedDomains || allowedDomains.length === 0) return true
return (
typeof c.domain === 'string' &&
allowedDomains.some(d => c.domain.toLowerCase().endsWith(d.toLowerCase()))
)
})
.map(c => [c.name, c])
).values()
]
.map(c => `${c.name}=${c.value}`)
.join('; ')
}
}

View File

@@ -220,14 +220,14 @@ export default class BrowserUtils {
)
// Wait for selector to exist before clicking
await page.waitForSelector(selector, { timeout: 10000 })
await page.waitForSelector(selector, { timeout: 1000 }).catch(() => {})
const cursor = createCursor(page as any)
await cursor.click(selector, options)
return true
} catch (error) {
this.bot.logger.error(
this.bot.logger.warn(
this.bot.isMobile,
'GHOST-CLICK',
`Failed for ${selector}: ${error instanceof Error ? error.message : String(error)}`

View File

@@ -78,7 +78,7 @@ export class Login {
try {
this.bot.logger.info(this.bot.isMobile, 'LOGIN', 'Starting login process')
await page.goto('https://www.bing.com/rewards/dashboard', { waitUntil: 'domcontentloaded' }).catch(() => { })
await page.goto('https://www.bing.com/rewards/dashboard', { waitUntil: 'domcontentloaded' }).catch(() => {})
await this.bot.utils.wait(2000)
await this.bot.browser.utils.reloadBadPage(page)
await this.bot.browser.utils.disableFido(page)
@@ -154,7 +154,7 @@ export class Login {
}
private async detectCurrentState(page: Page, account?: Account): Promise<LoginState> {
await page.waitForLoadState('networkidle', { timeout: 5000 }).catch(() => { })
await page.waitForLoadState('networkidle', { timeout: 5000 }).catch(() => {})
const url = new URL(page.url())
this.bot.logger.debug(this.bot.isMobile, 'DETECT-STATE', `Current URL: ${url.hostname}${url.pathname}`)
@@ -190,7 +190,7 @@ export class Login {
[this.selectors.totpInput, '2FA_TOTP'],
[this.selectors.totpInputOld, '2FA_TOTP'],
[this.selectors.otpCodeEntry, 'OTP_CODE_ENTRY'], // PR 450
[this.selectors.otpInput, 'OTP_CODE_ENTRY'] // My Fix
[this.selectors.otpInput, 'OTP_CODE_ENTRY'] // My Fix
]
const results = await Promise.all(
@@ -435,7 +435,7 @@ export class Login {
waitUntil: 'domcontentloaded',
timeout: 10000
})
.catch(() => { })
.catch(() => {})
await this.bot.utils.wait(3000)
this.bot.logger.info(this.bot.isMobile, 'LOGIN', 'Recovery navigation successful')
return true
@@ -446,7 +446,7 @@ export class Login {
waitUntil: 'domcontentloaded',
timeout: 10000
})
.catch(() => { })
.catch(() => {})
await this.bot.utils.wait(3000)
this.bot.logger.info(this.bot.isMobile, 'LOGIN', 'Fallback navigation successful')
return true
@@ -502,7 +502,11 @@ export class Login {
}
case 'OTP_CODE_ENTRY': {
this.bot.logger.info(this.bot.isMobile, 'LOGIN', 'OTP code entry page detected, attempting to find password option')
this.bot.logger.info(
this.bot.isMobile,
'LOGIN',
'OTP code entry page detected, attempting to find password option'
)
// My Fix: Click "Use your password" footer
const footerLink = await page
@@ -552,7 +556,7 @@ export class Login {
private async finalizeLogin(page: Page, email: string) {
this.bot.logger.info(this.bot.isMobile, 'LOGIN', 'Finalizing login')
await page.goto(this.bot.config.baseURL, { waitUntil: 'networkidle', timeout: 10000 }).catch(() => { })
await page.goto(this.bot.config.baseURL, { waitUntil: 'networkidle', timeout: 10000 }).catch(() => {})
const loginRewardsSuccess = new URL(page.url()).hostname === 'rewards.bing.com'
if (loginRewardsSuccess) {
@@ -583,7 +587,7 @@ export class Login {
this.bot.logger.info(this.bot.isMobile, 'LOGIN-BING', 'Verifying Bing session')
try {
await page.goto(url, { waitUntil: 'networkidle', timeout: 10000 }).catch(() => { })
await page.goto(url, { waitUntil: 'networkidle', timeout: 10000 }).catch(() => {})
for (let i = 0; i < loopMax; i++) {
if (page.isClosed()) break
@@ -605,7 +609,7 @@ export class Login {
)
if (atBingHome) {
await this.bot.browser.utils.tryDismissAllMessages(page).catch(() => { })
await this.bot.browser.utils.tryDismissAllMessages(page).catch(() => {})
const signedIn = await page
.waitForSelector(this.selectors.bingProfile, { timeout: 3000 })
@@ -641,7 +645,7 @@ export class Login {
try {
await page
.goto(`${this.bot.config.baseURL}?_=${Date.now()}`, { waitUntil: 'networkidle', timeout: 10000 })
.catch(() => { })
.catch(() => {})
for (let i = 0; i < loopMax; i++) {
if (page.isClosed()) break

View File

@@ -45,7 +45,7 @@ export function promptInput(options: PromptOptions): Promise<string | null> {
export async function getSubtitleMessage(page: Page): Promise<string | null> {
const message = await page
.waitForSelector('[data-testid="subtitle"]', { state: 'visible', timeout: 1000 })
.waitForSelector('[data-testid="subtitle"], div#oneTimeCodeDescription', { state: 'visible', timeout: 1000 })
.catch(() => null)
if (!message) return null

View File

@@ -22,12 +22,7 @@
"scrollRandomResults": false,
"clickRandomResults": false,
"parallelSearching": true,
"queryEngines": [
"google",
"wikipedia",
"reddit",
"local"
],
"queryEngines": ["google", "wikipedia", "reddit", "local"],
"searchResultVisitTime": "10sec",
"searchDelay": {
"min": "30sec",
@@ -42,13 +37,8 @@
"consoleLogFilter": {
"enabled": false,
"mode": "whitelist",
"levels": [
"error",
"warn"
],
"keywords": [
"starting account"
],
"levels": ["error", "warn"],
"keywords": ["starting account"],
"regexPatterns": []
},
"proxy": {
@@ -65,24 +55,15 @@
"topic": "",
"token": "",
"title": "Microsoft-Rewards-Script",
"tags": [
"bot",
"notify"
],
"tags": ["bot", "notify"],
"priority": 3
},
"webhookLogFilter": {
"enabled": false,
"mode": "whitelist",
"levels": [
"error"
],
"keywords": [
"starting account",
"select number",
"collected"
],
"levels": ["error"],
"keywords": ["starting account", "select number", "collected"],
"regexPatterns": []
}
}
}
}

View File

@@ -77,7 +77,6 @@ export class QueryCore {
const baseTopics = this.normalizeAndDedupe(topicLists.flat())
if (!baseTopics.length) {
this.bot.logger.warn(this.bot.isMobile, 'QUERY-MANAGER', 'No queries')
this.bot.logger.debug(this.bot.isMobile, 'QUERY-MANAGER', 'No base topics found (all sources empty)')
return []
}
@@ -115,7 +114,6 @@ export class QueryCore {
)
if (!finalQueries.length) {
this.bot.logger.warn(this.bot.isMobile, 'QUERY-MANAGER', 'No queries')
this.bot.logger.debug(this.bot.isMobile, 'QUERY-MANAGER', 'finalQueries deduped to 0')
return []
}
@@ -124,7 +122,6 @@ export class QueryCore {
return finalQueries
} catch (error) {
this.bot.logger.warn(this.bot.isMobile, 'QUERY-MANAGER', 'No queries')
this.bot.logger.debug(
this.bot.isMobile,
'QUERY-MANAGER',
@@ -216,7 +213,6 @@ export class QueryCore {
const response = await this.bot.axios.request(request, this.bot.config.proxy.queryEngine)
const trendsData = this.extractJsonFromResponse(response.data)
if (!trendsData) {
this.bot.logger.warn(this.bot.isMobile, 'SEARCH-GOOGLE-TRENDS', 'No queries')
this.bot.logger.debug(this.bot.isMobile, 'SEARCH-GOOGLE-TRENDS', 'No trendsData parsed from response')
return []
}
@@ -234,7 +230,6 @@ export class QueryCore {
})
}
} catch (error) {
this.bot.logger.warn(this.bot.isMobile, 'SEARCH-GOOGLE-TRENDS', 'No queries')
this.bot.logger.debug(
this.bot.isMobile,
'SEARCH-GOOGLE-TRENDS',
@@ -277,7 +272,6 @@ export class QueryCore {
response.data.suggestionGroups?.[0]?.searchSuggestions?.map((x: { query: any }) => x.query) ?? []
if (!suggestions.length) {
this.bot.logger.warn(this.bot.isMobile, 'SEARCH-BING-SUGGESTIONS', 'No queries')
this.bot.logger.debug(
this.bot.isMobile,
'SEARCH-BING-SUGGESTIONS',
@@ -287,7 +281,6 @@ export class QueryCore {
return suggestions
} catch (error) {
this.bot.logger.warn(this.bot.isMobile, 'SEARCH-BING-SUGGESTIONS', 'No queries')
this.bot.logger.debug(
this.bot.isMobile,
'SEARCH-BING-SUGGESTIONS',
@@ -314,7 +307,6 @@ export class QueryCore {
const out = Array.isArray(related) ? related : []
if (!out.length) {
this.bot.logger.warn(this.bot.isMobile, 'SEARCH-BING-RELATED', 'No queries')
this.bot.logger.debug(
this.bot.isMobile,
'SEARCH-BING-RELATED',
@@ -324,7 +316,6 @@ export class QueryCore {
return out
} catch (error) {
this.bot.logger.warn(this.bot.isMobile, 'SEARCH-BING-RELATED', 'No queries')
this.bot.logger.debug(
this.bot.isMobile,
'SEARCH-BING-RELATED',
@@ -359,7 +350,6 @@ export class QueryCore {
) ?? []
if (!topics.length) {
this.bot.logger.warn(this.bot.isMobile, 'SEARCH-BING-TRENDING', 'No queries')
this.bot.logger.debug(
this.bot.isMobile,
'SEARCH-BING-TRENDING',
@@ -369,7 +359,6 @@ export class QueryCore {
return topics
} catch (error) {
this.bot.logger.warn(this.bot.isMobile, 'SEARCH-BING-TRENDING', 'No queries')
this.bot.logger.debug(
this.bot.isMobile,
'SEARCH-BING-TRENDING',
@@ -402,7 +391,6 @@ export class QueryCore {
const out = articles.slice(0, 50).map(a => a.article.replace(/_/g, ' '))
if (!out.length) {
this.bot.logger.warn(this.bot.isMobile, 'SEARCH-WIKIPEDIA-TRENDING', 'No queries')
this.bot.logger.debug(
this.bot.isMobile,
'SEARCH-WIKIPEDIA-TRENDING',
@@ -412,7 +400,6 @@ export class QueryCore {
return out
} catch (error) {
this.bot.logger.warn(this.bot.isMobile, 'SEARCH-WIKIPEDIA-TRENDING', 'No queries')
this.bot.logger.debug(
this.bot.isMobile,
'SEARCH-WIKIPEDIA-TRENDING',
@@ -441,7 +428,6 @@ export class QueryCore {
const out = posts.filter(p => !p.data.over_18).map(p => p.data.title)
if (!out.length) {
this.bot.logger.warn(this.bot.isMobile, 'SEARCH-REDDIT-TRENDING', 'No queries')
this.bot.logger.debug(
this.bot.isMobile,
'SEARCH-REDDIT-TRENDING',
@@ -451,7 +437,6 @@ export class QueryCore {
return out
} catch (error) {
this.bot.logger.warn(this.bot.isMobile, 'SEARCH-REDDIT', 'No queries')
this.bot.logger.debug(
this.bot.isMobile,
'SEARCH-REDDIT',
@@ -476,7 +461,6 @@ export class QueryCore {
)
if (!out.length) {
this.bot.logger.warn(this.bot.isMobile, 'SEARCH-LOCAL-QUERY-LIST', 'No queries')
this.bot.logger.debug(
this.bot.isMobile,
'SEARCH-LOCAL-QUERY-LIST',
@@ -486,7 +470,6 @@ export class QueryCore {
return out
} catch (error) {
this.bot.logger.warn(this.bot.isMobile, 'SEARCH-LOCAL-QUERY-LIST', 'No queries')
this.bot.logger.debug(
this.bot.isMobile,
'SEARCH-LOCAL-QUERY-LIST',

View File

@@ -21,9 +21,9 @@ export class DoubleSearchPoints extends Workers {
return
}
this.cookieHeader = (this.bot.isMobile ? this.bot.cookies.mobile : this.bot.cookies.desktop)
.map((c: { name: string; value: string }) => `${c.name}=${c.value}`)
.join('; ')
this.cookieHeader = this.bot.browser.func.buildCookieHeader(
this.bot.isMobile ? this.bot.cookies.mobile : this.bot.cookies.desktop
)
const fingerprintHeaders = { ...this.bot.fingerprint.headers }
delete fingerprintHeaders['Cookie']

View File

@@ -25,9 +25,9 @@ export class FindClippy extends Workers {
return
}
this.cookieHeader = (this.bot.isMobile ? this.bot.cookies.mobile : this.bot.cookies.desktop)
.map((c: { name: string; value: string }) => `${c.name}=${c.value}`)
.join('; ')
this.cookieHeader = this.bot.browser.func.buildCookieHeader(
this.bot.isMobile ? this.bot.cookies.mobile : this.bot.cookies.desktop
)
const fingerprintHeaders = { ...this.bot.fingerprint.headers }
delete fingerprintHeaders['Cookie']

View File

@@ -23,9 +23,9 @@ export class Quiz extends Workers {
)
try {
this.cookieHeader = (this.bot.isMobile ? this.bot.cookies.mobile : this.bot.cookies.desktop)
.map((c: { name: string; value: string }) => `${c.name}=${c.value}`)
.join('; ')
this.cookieHeader = this.bot.browser.func.buildCookieHeader(
this.bot.isMobile ? this.bot.cookies.mobile : this.bot.cookies.desktop
)
const fingerprintHeaders = { ...this.bot.fingerprint.headers }
delete fingerprintHeaders['Cookie']

View File

@@ -30,9 +30,9 @@ export class UrlReward extends Workers {
)
try {
this.cookieHeader = (this.bot.isMobile ? this.bot.cookies.mobile : this.bot.cookies.desktop)
.map((c: { name: string; value: string }) => `${c.name}=${c.value}`)
.join('; ')
this.cookieHeader = this.bot.browser.func.buildCookieHeader(
this.bot.isMobile ? this.bot.cookies.mobile : this.bot.cookies.desktop
)
const fingerprintHeaders = { ...this.bot.fingerprint.headers }
delete fingerprintHeaders['Cookie']

View File

@@ -53,7 +53,7 @@ export class Search extends Workers {
queries = [...new Set(queries.map(q => q.trim()).filter(Boolean))]
this.bot.logger.debug(isMobile, 'SEARCH-BING', `Query pool ready | count=${queries.length}`)
this.bot.logger.info(isMobile, 'SEARCH-BING', `Search query pool ready | count=${queries.length}`)
// Go to bing
const targetUrl = this.searchPageURL ? this.searchPageURL : this.bingHome
@@ -169,10 +169,10 @@ export class Search extends Workers {
const newPool = [...new Set(merged)]
queries = this.bot.utils.shuffleArray(newPool)
this.bot.logger.debug(
this.bot.logger.info(
isMobile,
'SEARCH-BING-EXTRA',
`New query pool generated | count=${queries.length}`
`New search query pool generated | count=${queries.length}`
)
for (const query of queries) {

View File

@@ -32,9 +32,9 @@ export class SearchOnBing extends Workers {
)
try {
this.cookieHeader = (this.bot.isMobile ? this.bot.cookies.mobile : this.bot.cookies.desktop)
.map((c: { name: string; value: string }) => `${c.name}=${c.value}`)
.join('; ')
this.cookieHeader = this.bot.browser.func.buildCookieHeader(
this.bot.isMobile ? this.bot.cookies.mobile : this.bot.cookies.desktop
)
const fingerprintHeaders = { ...this.bot.fingerprint.headers }
delete fingerprintHeaders['Cookie']

View File

@@ -579,4 +579,4 @@
"meilleurs cafés près de chez moi"
]
}
]
]

View File

@@ -579,4 +579,4 @@
"meilleurs cafés près de chez moi"
]
}
]
]

View File

@@ -113,4 +113,4 @@
"how to clear browser cache",
"popular tech podcasts",
"how to stay motivated"
]
]

View File

@@ -45,7 +45,7 @@ function formatMessage(message: string | Error): string {
}
export class Logger {
constructor(private bot: MicrosoftRewardsBot) { }
constructor(private bot: MicrosoftRewardsBot) {}
info(isMobile: Platform, title: string, message: string, color?: ColorKey) {
return this.baseLog('info', isMobile, title, message, color)
@@ -180,7 +180,7 @@ export class Logger {
isMatch = true
break
}
} catch { }
} catch {}
}
}

View File

@@ -40,12 +40,8 @@
// "noPropertyAccessFromIndexSignature": true, /* Require undeclared properties from index signatures to use element accesses. */
/* Module Resolution Options */
"moduleResolution": "node" /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */,
"types": [
"node"
],
"typeRoots": [
"./node_modules/@types"
],
"types": ["node"],
"typeRoots": ["./node_modules/@types"],
// Keep explicit typeRoots to ensure resolution in environments that don't auto-detect before full install.
// "baseUrl": "./", /* Base directory to resolve non-absolute module names. */
// "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
@@ -76,7 +72,5 @@
"src/functions/bing-search-activity-queries.json",
"src/functions/search-queries.json"
],
"exclude": [
"node_modules"
]
}
"exclude": ["node_modules"]
}